diff --git a/.gitbook.yaml b/.gitbook.yaml index 85fb7f6e54..bbdd0c57e3 100644 --- a/.gitbook.yaml +++ b/.gitbook.yaml @@ -4,4 +4,5 @@ structure: readme: README.md redirects: - reference/telemetry: ./reference/usage.md \ No newline at end of file + reference/telemetry: ./reference/usage.md + quickstart: ./getting-started/quickstart.md diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index e26febe740..0000000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1,4 +0,0 @@ -# Each line is a file pattern followed by one or more owners. -# https://help.github.com/en/articles/about-code-owners - -* @woop @achals @tsotnet @feast-dev/maintainers diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index c405c1f084..2f2d0d2f5e 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,7 +2,7 @@ name: Bug report about: Create a report to help us improve title: '' -labels: '' +labels: 'kind/bug, priority/p2' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index bbcbbe7d61..d73d644481 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -2,7 +2,7 @@ name: Feature request about: Suggest an idea for this project title: '' -labels: '' +labels: 'kind/feature' assignees: '' --- diff --git a/.github/auto_assign-issues.yml b/.github/auto_assign-issues.yml deleted file mode 100644 index cf2c722131..0000000000 --- a/.github/auto_assign-issues.yml +++ /dev/null @@ -1,10 +0,0 @@ -# If enabled, auto-assigns users when a new issue is created -# Defaults to true, allows you to install the app globally, and disable on a per-repo basis -addAssignees: true - -# The list of users to assign to new issues. -# If empty or not provided, the repository owner is assigned -assignees: - - jklegar - - woop - - tsotnet diff --git a/.github/auto_assign.yml b/.github/auto_assign.yml new file mode 100644 index 0000000000..18151f7454 --- /dev/null +++ b/.github/auto_assign.yml @@ -0,0 +1,19 @@ +# Set to true to add reviewers to pull requests +addReviewers: false + +# Set to true to add assignees to pull requests +addAssignees: true + +# A list of assignees, overrides reviewers if set +assignees: + - woop + - tsotnet + - achals + - adchia + - felixwang9817 + +# A number of assignees to add to the pull request +# Set to 0 to add all of the assignees. +# Uses numberOfReviewers if unset. +numberOfAssignees: 1 + diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index d210cbe2f4..e8d00798c0 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -2,9 +2,10 @@ 1. Ensure that your code follows our code conventions: https://github.com/feast-dev/feast/blob/master/CONTRIBUTING.md#code-style--linting 2. Run unit tests and ensure that they are passing: https://github.com/feast-dev/feast/blob/master/CONTRIBUTING.md#unit-tests -3. If your change introduces any API changes, make sure to update the integration tests scripts here: https://github.com/feast-dev/feast/tree/master/sdk/python/tests or https://github.com/feast-dev/feast/tree/master/sdk/go +3. If your change introduces any API changes, make sure to update the integration tests here: https://github.com/feast-dev/feast/tree/master/sdk/python/tests 4. Make sure documentation is updated for your PR! -5. Make sure you have signed the CLA https://cla.developers.google.com/clas +5. 
Make sure your commits are signed: https://github.com/feast-dev/feast/blob/master/CONTRIBUTING.md#signing-off-commits +6. Make sure your PR title follows conventional commits (e.g. fix: [description] vs feat: [description]) --> @@ -16,16 +17,3 @@ Usage: `Fixes #`, or `Fixes (paste link of issue)`. --> Fixes # - -**Does this PR introduce a user-facing change?**: - -```release-note - -``` diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml deleted file mode 100644 index 322ee0b6e5..0000000000 --- a/.github/workflows/integration_tests.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: integration-tests - -on: - push: - branches: - - master - -jobs: - integration-test-python: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: [ 3.7, 3.8, 3.9 ] - os: [ ubuntu-latest ] - env: - OS: ${{ matrix.os }} - PYTHON: ${{ matrix.python-version }} - services: - redis: - image: redis - ports: - - 6379:6379 - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - steps: - - uses: actions/checkout@v2 - - name: Setup Python - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - architecture: x64 - - name: Set up Cloud SDK - uses: google-github-actions/setup-gcloud@v0 - with: - project_id: ${{ secrets.GCP_PROJECT_ID }} - service_account_key: ${{ secrets.GCP_SA_KEY }} - export_default_credentials: true - - name: Use gcloud CLI - run: gcloud info - - name: Set up AWS SDK - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - name: Use AWS CLI - run: aws sts get-caller-identity - - name: Install dependencies - run: make install-python-ci-dependencies - - name: Test python - run: FEAST_USAGE=False pytest -n 8 --cov=./ --cov-report=xml --verbose --color=yes sdk/python/tests --integration - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: ./coverage.xml - flags: integrationtests - env_vars: OS,PYTHON - fail_ci_if_error: true - verbose: true diff --git a/.github/workflows/java_master_only.yml b/.github/workflows/java_master_only.yml new file mode 100644 index 0000000000..a856fbe2cb --- /dev/null +++ b/.github/workflows/java_master_only.yml @@ -0,0 +1,121 @@ +name: java-integration-tests-and-build + +on: + push: + branches: + - master + tags: + - 'v*.*.*' + +jobs: + build-docker-images: + runs-on: ubuntu-latest + strategy: + matrix: + component: [feature-server-java] + env: + MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar + REGISTRY: gcr.io/kf-feast + steps: + - uses: actions/checkout@v2 + with: + submodules: 'true' + - uses: google-github-actions/setup-gcloud@v0 + with: + version: '290.0.1' + export_default_credentials: true + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + - run: gcloud auth configure-docker --quiet + - name: Get m2 cache + run: | + infra/scripts/download-maven-cache.sh \ + --archive-uri ${MAVEN_CACHE} \ + --output-dir . 
+ - name: Get version + run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV + - name: Build image + run: make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} + - name: Push image + run: make push-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} + - name: Push development Docker image + run: | + if [ ${GITHUB_REF#refs/*/} == "master" ]; then + docker tag ${REGISTRY}/${{ matrix.component }}:${GITHUB_SHA} ${REGISTRY}/${{ matrix.component }}:develop + docker push ${REGISTRY}/${{ matrix.component }}:develop + fi + + lint-java: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + submodules: 'true' + - name: Lint java + run: make lint-java + + unit-test-java: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + submodules: 'true' + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: '11' + java-package: jdk + architecture: x64 + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-ut-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-ut-maven- + - name: Test java + run: make test-java-with-coverage + - uses: actions/upload-artifact@v2 + with: + name: java-coverage-report + path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/ + + integration-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + submodules: 'true' + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: '11' + java-package: jdk + architecture: x64 + - uses: actions/setup-python@v2 + with: + python-version: '3.7' + architecture: 'x64' + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-it-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-it-maven- + - name: Set up gcloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Use AWS CLI + run: aws sts get-caller-identity + - name: Run integration tests + run: make test-java-integration diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml new file mode 100644 index 0000000000..9906c246ec --- /dev/null +++ b/.github/workflows/java_pr.yml @@ -0,0 +1,109 @@ +name: java_pr + +on: + pull_request_target: + types: + - opened + - synchronize + - labeled + +jobs: + lint-java: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. + ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Lint java + run: make lint-java + + unit-test-java: + runs-on: ubuntu-latest + needs: lint-java + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. 
+ ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: '11' + java-package: jdk + architecture: x64 + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-ut-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-ut-maven- + - name: Test java + run: make test-java-with-coverage + - uses: actions/upload-artifact@v2 + with: + name: java-coverage-report + path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/ + + integration-test: + # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. + if: + (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + runs-on: ubuntu-latest + needs: unit-test-java + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. + ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: '11' + java-package: jdk + architecture: x64 + - uses: actions/setup-python@v2 + with: + python-version: '3.7' + architecture: 'x64' + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-it-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-it-maven- + - name: Set up gcloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Use AWS CLI + run: aws sts get-caller-identity + - name: Run integration tests + run: make test-java-integration + - name: Save report + uses: actions/upload-artifact@v2 + if: failure() + with: + name: it-report + path: spark/ingestion/target/test-reports/TestSuite.txt + retention-days: 5 diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml new file mode 100644 index 0000000000..40c3dead00 --- /dev/null +++ b/.github/workflows/lint_pr.yml @@ -0,0 +1,24 @@ +name: lint-pr + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + validate-title: + name: Validate PR title + runs-on: ubuntu-latest + steps: + - uses: amannn/action-semantic-pull-request@v4 + with: + # Must use uppercase + subjectPattern: ^(?=[A-Z]).+$ + subjectPatternError: | + The subject "{subject}" found in the pull request title "{title}" + didn't match the configured pattern. Please ensure that the subject + starts with an uppercase character. 
+ env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 3501427c3f..971282966c 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -4,21 +4,66 @@ on: [push, pull_request] jobs: lint-python: - container: python:3.7 runs-on: [ubuntu-latest] + env: + PYTHON: 3.7 steps: - uses: actions/checkout@v2 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: "3.7" + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.17.7 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools - name: Install dependencies - run: make install-python-ci-dependencies + run: | + make compile-protos-go + make install-python-ci-dependencies - name: Lint python run: make lint-python lint-go: - container: gcr.io/kf-feast/feast-ci:latest - runs-on: [ubuntu-latest] + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.17.7 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: "3.7" + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" - name: Install dependencies - run: make install-go-ci-dependencies + run: make install-go-proto-dependencies - name: Lint go run: make lint-go \ No newline at end of file diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml new file mode 100644 index 0000000000..c2a37f6f2e --- /dev/null +++ b/.github/workflows/master_only.yml @@ -0,0 +1,208 @@ +name: integration-tests-and-build + +on: + push: + branches: + - master + +jobs: + build-lambda-docker-image: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + - name: Set ECR image tag + id: image-tag + run: echo "::set-output name=DOCKER_IMAGE_TAG::`git rev-parse HEAD`" + - name: Cache Public ECR Image + id: lambda_python_3_9 + uses: actions/cache@v2 + with: + path: ~/cache + key: lambda_python_3_9 + - name: Handle Cache Miss (pull public ECR image & save it to tar file) + if: steps.cache-primes.outputs.cache-hit != 'true' + run: | + mkdir -p ~/cache + docker pull public.ecr.aws/lambda/python:3.9 + docker save public.ecr.aws/lambda/python:3.9 -o ~/cache/lambda_python_3_9.tar + - name: Handle Cache Hit (load docker image from tar file) + if: steps.cache-primes.outputs.cache-hit == 'true' + run: | + docker load -i ~/cache/lambda_python_3_9.tar + - name: 
Build and push + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: feast-python-server + run: | + docker build \ + --file sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile \ + --tag $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} \ + . + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} + outputs: + DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} + integration-test-python-and-go: + needs: build-lambda-docker-image + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [ "3.7", "3.8", "3.9", "3.10" ] + go-version: [ 1.17.0 ] + os: [ ubuntu-latest ] + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + services: + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - uses: actions/checkout@v2 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Use AWS CLI + run: aws sts get-caller-identity + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools + - name: Install dependencies + run: make install-python-ci-dependencies + - name: Setup Redis Cluster + run: | + docker pull vishnunair/docker-redis-cluster:latest + docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster + - name: Test python and go + env: + FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-lambda-docker-image.outputs.DOCKER_IMAGE_TAG }} + FEAST_USAGE: "False" + IS_TEST: "True" + SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} + SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} + SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} + SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} + SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} + run: pytest -n 8 --cov=./ --cov-report=xml --verbose --color=yes sdk/python/tests --integration --durations=5 +# - name: Upload coverage to Codecov +# uses: codecov/codecov-action@v1 +# with: +# token: ${{ secrets.CODECOV_TOKEN }} +# files: ./coverage.xml +# flags: integrationtests +# env_vars: OS,PYTHON +# fail_ci_if_error: true +# 
verbose: true + - name: Benchmark python + env: + FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-lambda-docker-image.outputs.DOCKER_IMAGE_TAG }} + FEAST_USAGE: "False" + IS_TEST: "True" + SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} + SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} + SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} + SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} + SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} + run: pytest --verbose --color=yes sdk/python/tests --integration --benchmark --benchmark-autosave --benchmark-save-data --durations=5 + - name: Upload Benchmark Artifact to S3 + run: aws s3 cp --recursive .benchmarks s3://feast-ci-pytest-benchmarks + + build-all-docker-images: + runs-on: ubuntu-latest + strategy: + matrix: + component: [ feature-server-python-aws, feature-server-java, feature-transformation-server ] + env: + MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar + REGISTRY: gcr.io/kf-feast + steps: + - uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - run: gcloud auth configure-docker --quiet + - name: Get m2 cache + run: | + infra/scripts/download-maven-cache.sh \ + --archive-uri ${MAVEN_CACHE} \ + --output-dir . + - name: Build image + run: | + make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} + - name: Push image + run: | + make push-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} + + docker tag ${REGISTRY}/${{ matrix.component }}:${GITHUB_SHA} ${REGISTRY}/${{ matrix.component }}:develop + docker push ${REGISTRY}/${{ matrix.component }}:develop diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 26ac821ac2..f3f5f85836 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -7,16 +7,81 @@ on: - synchronize - labeled +# concurrency is currently broken, see details https://github.com/actions/runner/issues/1532 +#concurrency: +# group: pr-integration-tests-${{ github.event.pull_request.number }} +# cancel-in-progress: true + jobs: + build-docker-image: + # all jobs MUST have this if check for 'ok-to-test' or 'approved' or 'lgtm' for security purposes. + if: + (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + # pull_request_target runs the workflow in the context of the base repo + # as such actions/checkout needs to be explicit configured to retrieve + # code from the PR. 
+ ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + - name: Set ECR image tag + id: image-tag + run: echo "::set-output name=DOCKER_IMAGE_TAG::`git rev-parse HEAD`" + - name: Cache Public ECR Image + id: lambda_python_3_9 + uses: actions/cache@v2 + with: + path: ~/cache + key: lambda_python_3_9 + - name: Handle Cache Miss (pull public ECR image & save it to tar file) + if: steps.cache-primes.outputs.cache-hit != 'true' + run: | + mkdir -p ~/cache + docker pull public.ecr.aws/lambda/python:3.9 + docker save public.ecr.aws/lambda/python:3.9 -o ~/cache/lambda_python_3_9.tar + - name: Handle Cache Hit (load docker image from tar file) + if: steps.cache-primes.outputs.cache-hit == 'true' + run: | + docker load -i ~/cache/lambda_python_3_9.tar + - name: Build and push + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: feast-python-server + run: | + docker build \ + --file sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile \ + --tag $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} \ + . + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} + outputs: + DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} integration-test-python: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. - if: (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) - || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + # all jobs MUST have this if check for 'ok-to-test' or 'approved' or 'lgtm' for security purposes. 
+ if: + (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + needs: build-docker-image runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - python-version: [ 3.7, 3.8, 3.9 ] + python-version: [ "3.7" ] os: [ ubuntu-latest ] env: OS: ${{ matrix.os }} @@ -41,9 +106,15 @@ jobs: submodules: recursive - name: Setup Python uses: actions/setup-python@v2 + id: setup-python with: python-version: ${{ matrix.python-version }} architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.17.7 - name: Set up gcloud SDK uses: google-github-actions/setup-gcloud@v0 with: @@ -60,16 +131,49 @@ jobs: aws-region: us-west-2 - name: Use AWS CLI run: aws sts get-caller-identity + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools - name: Install dependencies run: make install-python-ci-dependencies + - name: Setup Redis Cluster + run: | + docker pull vishnunair/docker-redis-cluster:latest + docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster - name: Test python - run: FEAST_USAGE=False pytest -n 8 --cov=./ --cov-report=xml --verbose --color=yes sdk/python/tests --integration - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: ./coverage.xml - flags: integrationtests - env_vars: OS,PYTHON - fail_ci_if_error: true - verbose: true + if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak + env: + FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }} + FEAST_USAGE: "False" + IS_TEST: "True" + SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} + SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} + SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} + SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} + SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} + run: pytest -n 8 --cov=./ --cov-report=xml --verbose --color=yes sdk/python/tests --integration --durations=5 +# - name: Upload coverage to Codecov +# uses: codecov/codecov-action@v1 +# with: +# token: ${{ secrets.CODECOV_TOKEN }} +# files: ./coverage.xml +# flags: integrationtests +# env_vars: OS,PYTHON +# fail_ci_if_error: true +# verbose: true diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000000..763cb52d0b --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,262 @@ +name: publish + +on: + push: + tags: + - 'v*.*.*' + +jobs: + get-version: + runs-on: ubuntu-latest + outputs: + release_version: ${{ 
steps.get_release_version.outputs.release_version }} + version_without_prefix: ${{ steps.get_release_version_without_prefix.outputs.version_without_prefix }} + highest_semver_tag: ${{ steps.get_highest_semver.outputs.highest_semver_tag }} + steps: + - uses: actions/checkout@v2 + - name: Get release version + id: get_release_version + run: echo ::set-output name=release_version::${GITHUB_REF#refs/*/} + - name: Get release version without prefix + id: get_release_version_without_prefix + env: + RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} + run: | + echo ::set-output name=version_without_prefix::${RELEASE_VERSION:1} + - name: Get highest semver + id: get_highest_semver + env: + RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} + run: | + source infra/scripts/setup-common-functions.sh + SEMVER_REGEX='^v[0-9]+\.[0-9]+\.[0-9]+(-([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?$' + if echo "${RELEASE_VERSION}" | grep -P "$SEMVER_REGEX" &>/dev/null ; then + echo ::set-output name=highest_semver_tag::$(get_tag_release -m) + fi + - name: Check output + env: + RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} + VERSION_WITHOUT_PREFIX: ${{ steps.get_release_version_without_prefix.outputs.version_without_prefix }} + HIGHEST_SEMVER_TAG: ${{ steps.get_highest_semver.outputs.highest_semver_tag }} + run: | + echo $RELEASE_VERSION + echo $VERSION_WITHOUT_PREFIX + echo $HIGHEST_SEMVER_TAG + + build-publish-docker-images: + runs-on: ubuntu-latest + needs: get-version + strategy: + matrix: + component: [feature-server-python-aws, feature-server-java, feature-transformation-server] + env: + MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar + REGISTRY: feastdev + steps: + - uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v0 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: Use gcloud CLI + run: gcloud info + - run: gcloud auth configure-docker --quiet + - name: Get m2 cache + run: | + infra/scripts/download-maven-cache.sh \ + --archive-uri ${MAVEN_CACHE} \ + --output-dir . 
+ - name: Build image + run: | + make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + env: + RELEASE_VERSION: ${{ needs.get-version.outputs.release_version }} + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} + HIGHEST_SEMVER_TAG: ${{ needs.get-version.outputs.highest_semver_tag }} + - name: Push versioned images + env: + RELEASE_VERSION: ${{ needs.get-version.outputs.release_version }} + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} + HIGHEST_SEMVER_TAG: ${{ needs.get-version.outputs.highest_semver_tag }} + run: | + make push-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + + echo "Only push to latest tag if tag is the highest semver version $HIGHEST_SEMVER_TAG" + if [ "${VERSION_WITHOUT_PREFIX}" = "${HIGHEST_SEMVER_TAG:1}" ] + then + docker tag feastdev/${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} feastdev/${{ matrix.component }}:latest + docker push feastdev/${{ matrix.component }}:latest + fi + + publish-helm-charts: + runs-on: ubuntu-latest + needs: get-version + env: + HELM_VERSION: v3.8.0 + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} + steps: + - uses: actions/checkout@v2 + - uses: google-github-actions/setup-gcloud@v0 + with: + version: '290.0.1' + export_default_credentials: true + project_id: ${{ secrets.GCP_PROJECT_ID }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + - run: gcloud auth configure-docker --quiet + - name: Remove previous Helm + run: sudo rm -rf $(which helm) + - name: Install Helm + run: ./infra/scripts/helm/install-helm.sh + - name: Validate Helm chart prior to publishing + run: ./infra/scripts/helm/validate-helm-chart-publish.sh + - name: Validate all version consistency + run: ./infra/scripts/helm/validate-helm-chart-versions.sh $VERSION_WITHOUT_PREFIX + - name: Publish Helm charts + run: ./infra/scripts/helm/push-helm-charts.sh $VERSION_WITHOUT_PREFIX + + publish-python-sdk: + runs-on: ubuntu-latest + needs: [build-python-sdk, build-python-sdk-macos-py310] + steps: + - uses: actions/download-artifact@v2 + with: + name: wheels + path: dist + - uses: pypa/gh-action-pypi-publish@v1.4.2 + with: + user: __token__ + password: ${{ secrets.PYPI_PASSWORD }} + + + build-python-sdk: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ ubuntu-latest, macos-10.15 ] + + steps: + - uses: actions/checkout@v2 + + - name: Build wheels + uses: pypa/cibuildwheel@v2.4.0 + with: + package-dir: sdk/python + env: + CIBW_BUILD: "cp3*_x86_64" + CIBW_SKIP: "cp36-* *-musllinux_x86_64 cp310-macosx_x86_64" + CIBW_ARCHS: "native" + CIBW_ENVIRONMENT: > + COMPILE_GO=True + CIBW_BEFORE_ALL_LINUX: | + yum install -y golang + CIBW_BEFORE_ALL_MACOS: | + curl -o python.pkg https://www.python.org/ftp/python/3.9.12/python-3.9.12-macosx10.9.pkg + sudo installer -pkg python.pkg -target / + CIBW_BEFORE_BUILD: | + make install-protoc-dependencies + make install-go-proto-dependencies + make install-go-ci-dependencies + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./wheelhouse/*.whl + + + build-python-sdk-macos-py310: + runs-on: macos-10.15 + env: + COMPILE_GO: True + steps: + - uses: actions/checkout@v2 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + architecture: x64 + - name: Install dependencies + run: | + pip install -U pip setuptools wheel twine + make install-protoc-dependencies 
+ make install-go-proto-dependencies + make install-go-ci-dependencies + - name: Build + run: | + cd sdk/python + python3 setup.py sdist bdist_wheel + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: sdk/python/dist/* + + + publish-java-sdk: + container: maven:3.6-jdk-11 + runs-on: ubuntu-latest + needs: get-version + steps: + - uses: actions/checkout@v2 + with: + submodules: 'true' + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + java-version: '11' + java-package: jdk + architecture: x64 + - uses: actions/setup-python@v2 + with: + python-version: '3.7' + architecture: 'x64' + - uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-it-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-it-maven- + - name: Publish java sdk + env: + VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} + GPG_PUBLIC_KEY: ${{ secrets.GPG_PUBLIC_KEY }} + GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} + MAVEN_SETTINGS: ${{ secrets.MAVEN_SETTINGS }} + run: | + echo -n "$GPG_PUBLIC_KEY" > /root/public-key + echo -n "$GPG_PRIVATE_KEY" > /root/private-key + mkdir -p /root/.m2/ + echo -n "$MAVEN_SETTINGS" > /root/.m2/settings.xml + infra/scripts/publish-java-sdk.sh --revision ${VERSION_WITHOUT_PREFIX} --gpg-key-import-dir /root + + publish-web-ui-npm: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: '17.x' + registry-url: 'https://registry.npmjs.org' + - name: Install yarn dependencies + working-directory: ./ui + run: yarn install + - name: Build yarn rollup + working-directory: ./ui + run: yarn build:lib + - name: Publish UI package + working-directory: ./ui + run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a563b80a75..2f4d15590a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,140 +1,43 @@ name: release on: - push: - tags: - - 'v*.*.*' + workflow_dispatch: + inputs: + dry_run: + description: 'Dry Run' + required: true + default: true + type: boolean + token: + description: 'Personal Access Token' + required: true + default: "" + type: string jobs: - get-version: - runs-on: ubuntu-latest - outputs: - release_version: ${{ steps.get_release_version.outputs.release_version }} - version_without_prefix: ${{ steps.get_release_version_without_prefix.outputs.version_without_prefix }} - highest_semver_tag: ${{ steps.get_highest_semver.outputs.highest_semver_tag }} - steps: - - uses: actions/checkout@v2 - - name: Get release version - id: get_release_version - run: echo ::set-output name=release_version::${GITHUB_REF#refs/*/} - - name: Get release version without prefix - id: get_release_version_without_prefix - env: - RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} - run: | - echo ::set-output name=version_without_prefix::${RELEASE_VERSION:1} - - name: Get highest semver - id: get_highest_semver - env: - RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} - run: | - source infra/scripts/setup-common-functions.sh - SEMVER_REGEX='^v[0-9]+\.[0-9]+\.[0-9]+(-([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?$' - if echo "${RELEASE_VERSION}" | grep -P "$SEMVER_REGEX" &>/dev/null ; then - echo ::set-output name=highest_semver_tag::$(get_tag_release -m) - fi - - name: Check output - env: - RELEASE_VERSION: ${{ steps.get_release_version.outputs.release_version }} - VERSION_WITHOUT_PREFIX: ${{ 
steps.get_release_version_without_prefix.outputs.version_without_prefix }} - HIGHEST_SEMVER_TAG: ${{ steps.get_highest_semver.outputs.highest_semver_tag }} - run: | - echo $RELEASE_VERSION - echo $VERSION_WITHOUT_PREFIX - echo $HIGHEST_SEMVER_TAG - - build-publish-docker-images: - runs-on: [ubuntu-latest] - needs: get-version - strategy: - matrix: - component: [jupyter] - env: - MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar - steps: - - uses: actions/checkout@v2 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up Cloud SDK - uses: google-github-actions/setup-gcloud@master - with: - project_id: ${{ secrets.GCP_PROJECT_ID }} - service_account_key: ${{ secrets.GCP_SA_KEY }} - export_default_credentials: true - - name: Use gcloud CLI - run: gcloud info - - run: gcloud auth configure-docker --quiet - - name: Get m2 cache - run: | - infra/scripts/download-maven-cache.sh \ - --archive-uri ${MAVEN_CACHE} \ - --output-dir . - - name: Build and push versioned images - env: - RELEASE_VERSION: ${{ needs.get-version.outputs.release_version }} - VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }} - HIGHEST_SEMVER_TAG: ${{ needs.get-version.outputs.highest_semver_tag }} - run: | - docker build --build-arg VERSION=$RELEASE_VERSION \ - -t gcr.io/kf-feast/feast-${{ matrix.component }}:${GITHUB_SHA} \ - -t gcr.io/kf-feast/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} \ - -t feastdev/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} \ - -f infra/docker/${{ matrix.component }}/Dockerfile . 
- docker push gcr.io/kf-feast/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} - docker push feastdev/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} - - echo "Only push to latest tag if tag is the highest semver version $HIGHEST_SEMVER_TAG" - if [ "${VERSION_WITHOUT_PREFIX}" = "${HIGHEST_SEMVER_TAG:1}" ] - then - docker tag feastdev/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} feastdev/feast-${{ matrix.component }}:latest - docker tag gcr.io/kf-feast/feast-${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} gcr.io/kf-feast/feast-${{ matrix.component }}:latest - docker push feastdev/feast-${{ matrix.component }}:latest - docker push gcr.io/kf-feast/feast-${{ matrix.component }}:latest - fi - - publish-helm-charts: - runs-on: ubuntu-latest - needs: get-version - env: - HELM_VERSION: v2.17.0 - steps: - - uses: actions/checkout@v2 - - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master - with: - version: '290.0.1' - export_default_credentials: true - project_id: ${{ secrets.GCP_PROJECT_ID }} - service_account_key: ${{ secrets.GCP_SA_KEY }} - - run: gcloud auth configure-docker --quiet - - name: Validate chart release versions - run: ./infra/scripts/validate-helm-chart-docker-image.sh - - name: Remove previous Helm - run: sudo rm -rf $(which helm) - - name: Install Helm - run: ./infra/scripts/install-helm.sh - - name: Publish Helm charts - run: ./infra/scripts/sync-helm-charts.sh - - publish-python-sdk: + release: + name: release runs-on: ubuntu-latest env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - container: python:3.7 + GITHUB_TOKEN: ${{ github.event.inputs.token }} + GIT_AUTHOR_NAME: feast-ci-bot + GIT_AUTHOR_EMAIL: feast-ci-bot@willem.co + GIT_COMMITTER_NAME: feast-ci-bot + GIT_COMMITTER_EMAIL: feast-ci-bot@willem.co steps: - - uses: actions/checkout@v2 - - name: Install dependencies - run: make install-python-ci-dependencies - - name: Publish Python Package - run: | - cd sdk/python - python3 -m pip install --user --upgrade setuptools wheel twine - python3 setup.py sdist bdist_wheel - python3 -m twine upload --verbose dist/* \ No newline at end of file + - name: Checkout + uses: actions/checkout@v2 + with: + persist-credentials: false + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '16' + - name: Release (Dry Run) + if: github.event.inputs.dry_run == 'true' + run: | + npx -p @semantic-release/changelog -p @semantic-release/git -p @semantic-release/exec -p semantic-release semantic-release --dry-run + - name: Release + if: github.event.inputs.dry_run == 'false' + run: | + npx -p @semantic-release/changelog -p @semantic-release/git -p @semantic-release/exec -p semantic-release semantic-release diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 7b480a8609..ccbe4376cd 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -7,7 +7,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ 3.7, 3.8, 3.9 ] + python-version: [ "3.7", "3.8", "3.9", "3.10" ] os: [ ubuntu-latest, macOS-latest] env: OS: ${{ matrix.os }} @@ -15,32 +15,75 @@ jobs: steps: - uses: actions/checkout@v2 - name: Setup Python + id: setup-python uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} architecture: x64 + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.17.7 + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" + - name: Get pip cache dir + id: pip-cache + run: 
| + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + /opt/hostedtoolcache/Python + /Users/runner/hostedtoolcache/Python + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + restore-keys: | + ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip- + - name: Install pip-tools + run: pip install pip-tools - name: Install dependencies run: make install-python-ci-dependencies - name: Test Python + env: + SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }} + SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }} + SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} + SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} + SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} run: FEAST_USAGE=False pytest -n 8 --cov=./ --cov-report=xml --verbose --color=yes sdk/python/tests - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: ./coverage.xml - flags: unittests - env_vars: OS,PYTHON - fail_ci_if_error: true - verbose: true +# - name: Upload coverage to Codecov +# uses: codecov/codecov-action@v1 +# with: +# token: ${{ secrets.CODECOV_TOKEN }} +# files: ./coverage.xml +# flags: unittests +# env_vars: OS,PYTHON +# fail_ci_if_error: true +# verbose: true unit-test-go: runs-on: ubuntu-latest - container: gcr.io/kf-feast/feast-ci:latest steps: - uses: actions/checkout@v2 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: "3.7" + - name: Upgrade pip version + run: | + pip install --upgrade "pip>=21.3.1" + - name: Setup Go + id: setup-go + uses: actions/setup-go@v2 + with: + go-version: 1.17.7 - name: Install dependencies - run: make install-go-ci-dependencies + run: make install-go-proto-dependencies - name: Compile protos run: make compile-protos-go - name: Test - run: make test-go \ No newline at end of file + run: make test-go diff --git a/.gitignore b/.gitignore index 9559998520..0f3165e841 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,6 @@ ### Scratch files ### scratch* -### Feast UI deprecated folder #### -ui/ - ### Local Environment ### *local*.env @@ -54,7 +51,7 @@ vendor .terraform/ *.tfvars -# python +# python # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -107,6 +104,8 @@ coverage.xml *.cover .hypothesis/ .pytest_cache/ +infra/scripts/*.conf +go/cmd/server/logging/feature_repo/data/ # Translations *.mo @@ -187,4 +186,39 @@ dmypy.json # Protos sdk/python/docs/html sdk/python/feast/protos/ -sdk/go/protos/ \ No newline at end of file +sdk/go/protos/ +go/protos/ + +#benchmarks +.benchmarks + +# Examples registry +**/registry.db +**/*.aof +**/*.rdb +**/nodes.conf + + +# Feast UI dependencies +ui/node_modules +ui/.pnp +ui/.pnp.js +ui/coverage +ui/build +ui/feature_repo/data/online.db +ui/feature_repo/registry.db +ui/.vercel + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +**/npm-debug.log* +**/yarn-debug.log* +**/yarn-error.log* + +# Go subprocess binaries (built during feast pip package building) +sdk/python/feast/binaries/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bc6dcfc6bd..82721e21e3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,4 +12,9 @@ repos: name: Lint stages: [ push ] language: system - entry: make lint \ No 
newline at end of file + entry: make lint + - id: template + name: Build Templates + stages: [ commit ] + language: system + entry: make build-templates \ No newline at end of file diff --git a/.prow.yaml b/.prow.yaml index e614e4a2f9..4c8372cc7c 100644 --- a/.prow.yaml +++ b/.prow.yaml @@ -1,122 +1,4 @@ -presubmits: -- name: test-core-and-ingestion - decorate: true - spec: - containers: - - image: maven:3.6-jdk-11 - command: ["infra/scripts/test-core-ingestion.sh"] - resources: - requests: - cpu: "2000m" - memory: "1536Mi" - skip_branches: - - ^v0\.(3|4)-branch$ - -- name: test-core-and-ingestion-java-8 - decorate: true - always_run: true - spec: - containers: - - image: maven:3.6-jdk-8 - command: ["infra/scripts/test-core-ingestion.sh"] - resources: - requests: - cpu: "2000m" - memory: "1536Mi" - branches: - - ^v0\.(3|4)-branch$ - -- name: test-serving - decorate: true - spec: - containers: - - image: maven:3.6-jdk-11 - command: ["infra/scripts/test-serving.sh"] - skip_branches: - - ^v0\.(3|4)-branch$ - -- name: test-serving-java-8 - decorate: true - always_run: true - spec: - containers: - - image: maven:3.6-jdk-8 - command: ["infra/scripts/test-serving.sh"] - branches: - - ^v0\.(3|4)-branch$ - -- name: test-java-sdk - decorate: true - spec: - containers: - - image: maven:3.6-jdk-11 - command: ["infra/scripts/test-java-sdk.sh"] - skip_branches: - - ^v0\.(3|4)-branch$ - -- name: test-java-sdk-java-8 - decorate: true - always_run: true - spec: - containers: - - image: maven:3.6-jdk-8 - command: ["infra/scripts/test-java-sdk.sh"] - branches: - - ^v0\.(3|4)-branch$ - -- name: test-usage - decorate: true - run_if_changed: "sdk/python/.*" - spec: - containers: - - image: python:3.7 - command: ["infra/scripts/test-usage.sh"] - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: /etc/gcloud/service-account.json - volumeMounts: - - mountPath: /etc/gcloud/service-account.json - name: service-account - readOnly: true - subPath: service-account.json - volumes: - - name: service-account - secret: - secretName: feast-service-account - -- name: test-golang-sdk - decorate: true - spec: - containers: - - image: golang:1.13 - command: ["infra/scripts/test-golang-sdk.sh"] - postsubmits: -- name: publish-python-sdk - decorate: true - spec: - containers: - - image: python:3 - command: - - sh - - -c - - | - make package-protos && make compile-protos-python && infra/scripts/publish-python-sdk.sh \ - --directory-path sdk/python --repository pypi - volumeMounts: - - name: pypirc - mountPath: /root/.pypirc - subPath: .pypirc - readOnly: true - volumes: - - name: pypirc - secret: - secretName: pypirc - branches: - # Filter on tags with semantic versioning, prefixed with "v" - # https://github.com/semver/semver/issues/232 - - ^v(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$ - - name: publish-java-sdk decorate: true spec: @@ -148,31 +30,3 @@ postsubmits: branches: # Filter on tags with semantic versioning, prefixed with "v". 
- ^v(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$ - -- name: publish-java-8-sdk - decorate: true - spec: - containers: - - image: maven:3.6-jdk-8 - command: - - bash - - -c - - infra/scripts/publish-java-sdk.sh --revision ${PULL_BASE_REF:1} - volumeMounts: - - name: gpg-keys - mountPath: /etc/gpg - readOnly: true - - name: maven-settings - mountPath: /root/.m2/settings.xml - subPath: settings.xml - readOnly: true - volumes: - - name: gpg-keys - secret: - secretName: gpg-keys - - name: maven-settings - secret: - secretName: maven-settings - branches: - # Filter on tags with semantic versioning, prefixed with "v". v0.3 and v0.4 only. - - ^v0\.(3|4)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$ diff --git a/.prow/config.yaml b/.prow/config.yaml index 3bc2d760a3..21adb54fda 100644 --- a/.prow/config.yaml +++ b/.prow/config.yaml @@ -60,10 +60,12 @@ tide: - do-not-merge/invalid-owners-file - do-not-merge/work-in-progress - needs-rebase - - needs-kind merge_method: feast-dev/feast: squash feast-dev/feast-spark: squash + merge_commit_template: + feast-dev/feast: + title: '{{.Title}} (#{{.Number}})' blocker_label: merge-blocker squash_label: tide/squash diff --git a/.prow/plugins.yaml b/.prow/plugins.yaml index 0f89e07be5..eca8851c67 100644 --- a/.prow/plugins.yaml +++ b/.prow/plugins.yaml @@ -12,8 +12,6 @@ plugins: - wip - trigger - config-updater - - require-matching-label - - release-note feast-dev/feast-spark: - approve - assign @@ -27,13 +25,13 @@ plugins: - wip - trigger - config-updater - - require-matching-label - - release-note config_updater: maps: .prow/config.yaml: name: config + .prow.yaml: + name: job-config external_plugins: feast-dev/feast: @@ -44,15 +42,3 @@ external_plugins: - name: needs-rebase events: - pull_request - -require_matching_label: -- missing_label: needs-kind - org: feast-dev - repo: feast - prs: true - regexp: ^kind/ -- missing_label: needs-kind - org: feast-dev - repo: feast-spark - prs: true - regexp: ^kind/ diff --git a/.releaserc.js b/.releaserc.js new file mode 100644 index 0000000000..2acf9b7350 --- /dev/null +++ b/.releaserc.js @@ -0,0 +1,83 @@ +// Release script for semantic-release.gitbook.io + +const execSync = require("child_process").execSync; + +// Get the current branch +const current_branch = execSync("git rev-parse --abbrev-ref HEAD").toString("utf8").trim(); + +// Validate the current branch +if (current_branch !== 'master') { + // Should be a release branch like v0.18-branch + is_valid = /v[0-9]\.[0-9][0-9]\-branch/gm.test(current_branch) + if (!is_valid) { + throw new Error(`Invalid branch name: ${current_branch}. 
Must be in release branch form like v0.18-branch or master`) + } +} + +// We have to dynamically generate all the supported branches for Feast because we use the `vA.B-branch` pattern for +// maintenance branches +possible_branches = [{name: "master"}, {name: current_branch}] + +// Below is the configuration for semantic release +module.exports = { + branches: possible_branches, + plugins: [ + // Try to guess the type of release we should be doing (minor, patch) + ["@semantic-release/commit-analyzer", { + // Ensure that breaking changes trigger minor instead of major releases + "releaseRules": [ + {breaking: true, release: 'minor'}, + {tag: 'Breaking', release: 'minor'}, + ] + }], + + ["@semantic-release/exec", { + // Validate the type of release we are doing + "verifyReleaseCmd": "./infra/scripts/validate-release.sh ${nextRelease.type} " + current_branch, + + // Bump all version files + "prepareCmd": "python ./infra/scripts/release/bump_file_versions.py ${lastRelease.version} ${nextRelease.version}" + }], + + "@semantic-release/release-notes-generator", + + // Update the changelog + [ + "@semantic-release/changelog", + { + changelogFile: "CHANGELOG.md", + changelogTitle: "# Changelog", + } + ], + + // Make a git commit, tag, and push the changes + [ + "@semantic-release/git", + { + assets: [ + "CHANGELOG.md", + "java/pom.xml", + "infra/charts/**/*.*", + "ui/package.json" + ], + message: "chore(release): release ${nextRelease.version}\n\n${nextRelease.notes}" + } + ], + + // Publish a GitHub release (but don't spam issues/PRs with comments) + [ + "@semantic-release/github", + { + successComment: false, + failComment: false, + failTitle: false, + labels: false, + } + ], + + // For some reason all patches are tagged as pre-release. This step undoes that. + ["@semantic-release/exec", { + "publishCmd": "python ./infra/scripts/release/unset_prerelease.py ${nextRelease.version}" + }], + ] +} diff --git a/CHANGELOG.md b/CHANGELOG.md index da7d7933db..ab3ac0cd1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,707 @@ # Changelog -## [v0.11.0](https://github.com/feast-dev/feast/tree/v0.11.0) (2021-06-24) +## [0.20.2](https://github.com/feast-dev/feast/compare/v0.20.1...v0.20.2) (2022-04-28) + + +### Bug Fixes + +* Feature with timestamp type is incorrectly interpreted by Go FS ([#2588](https://github.com/feast-dev/feast/issues/2588)) ([3ec943a](https://github.com/feast-dev/feast/commit/3ec943a9964cd2a2c58700dd946472788ac39ccb)) +* Fix AWS bootstrap template ([#2604](https://github.com/feast-dev/feast/issues/2604)) ([6df5a49](https://github.com/feast-dev/feast/commit/6df5a4968258337f7480b4e80d831b5e8960f1ad)) +* Fix broken proto conversion methods for data sources ([#2603](https://github.com/feast-dev/feast/issues/2603)) ([c391216](https://github.com/feast-dev/feast/commit/c3912169451757aa60768b302f8dad129fa796a5)) +* Remove ci extra from the feature transformation server dockerfile ([#2618](https://github.com/feast-dev/feast/issues/2618)) ([a7437fa](https://github.com/feast-dev/feast/commit/a7437fa6c49c717b148746455e2de2d6f98002f3)) +* Update field api to add tag parameter corresponding to labels in Feature. 
([#2610](https://github.com/feast-dev/feast/issues/2610)) ([40962fc](https://github.com/feast-dev/feast/commit/40962fcc6afc26601eb0440595c99d568463eb42)) +* Use timestamp type when converting unixtimestamp feature type to arrow ([#2593](https://github.com/feast-dev/feast/issues/2593)) ([a1c3ee3](https://github.com/feast-dev/feast/commit/a1c3ee38e2f2a8e4528c8a3f58144568e4337718)) + +## [0.20.1](https://github.com/feast-dev/feast/compare/v0.20.0...v0.20.1) (2022-04-20) + + +### Bug Fixes + +* Addresses ZeroDivisionError when materializing file source with same timestamps ([#2551](https://github.com/feast-dev/feast/issues/2551)) ([5539c51](https://github.com/feast-dev/feast/commit/5539c51646d3d2150df7476aa5ac9d075998b235)) +* Build platform specific python packages with ci-build-wheel ([#2555](https://github.com/feast-dev/feast/issues/2555)) ([1757639](https://github.com/feast-dev/feast/commit/17576396980a02e6ad7d70d69367df0823ef5408)) +* Enforce kw args featureservice ([#2575](https://github.com/feast-dev/feast/issues/2575)) ([4dce254](https://github.com/feast-dev/feast/commit/4dce254dc8c4f7de0c6005907ceba53b44f264ce)) +* Enforce kw args in datasources ([#2567](https://github.com/feast-dev/feast/issues/2567)) ([6374634](https://github.com/feast-dev/feast/commit/6374634c35b3820c4ed12edc7b2e70a9c561bfe5)) +* Fix `__hash__` methods ([#2556](https://github.com/feast-dev/feast/issues/2556)) ([dd8b854](https://github.com/feast-dev/feast/commit/dd8b8546fce90fab099cab71ab318681c3a0c998)) +* Fix DynamoDB fetches when there are entities that are not found ([#2573](https://github.com/feast-dev/feast/issues/2573)) ([882328f](https://github.com/feast-dev/feast/commit/882328f9b6da45a310916e5af23e0926b4186a85)) +* Fix push sources and add docs / tests pushing via the python feature server ([#2561](https://github.com/feast-dev/feast/issues/2561)) ([c5006c2](https://github.com/feast-dev/feast/commit/c5006c2cf47fd489d8f740d300f06b8fab387148)) +* Fixed data mapping errors for Snowflake ([#2558](https://github.com/feast-dev/feast/issues/2558)) ([abd6be7](https://github.com/feast-dev/feast/commit/abd6be73ec0b795e1ea043d9db2744156f04c5d3)) +* Small typo in CLI ([#2578](https://github.com/feast-dev/feast/issues/2578)) ([8717bc8](https://github.com/feast-dev/feast/commit/8717bc8c19be13158eb7c3de42d38383803195b9)) +* Switch from `join_key` to `join_keys` in tests and docs ([#2580](https://github.com/feast-dev/feast/issues/2580)) ([6130b80](https://github.com/feast-dev/feast/commit/6130b80f64b0952ed209213a371d959f41b9a350)) +* Update build_go_protos to use a consistent python path ([#2550](https://github.com/feast-dev/feast/issues/2550)) ([1c523bf](https://github.com/feast-dev/feast/commit/1c523bf8acd1d554efa4b6211420185f2b66ec36)) +* Update RedisCluster to use redis-py official implementation ([#2554](https://github.com/feast-dev/feast/issues/2554)) ([c47fa2a](https://github.com/feast-dev/feast/commit/c47fa2a58ddaee892095b867a022cfcf236ff7c1)) +* Use cwd when getting module path ([#2577](https://github.com/feast-dev/feast/issues/2577)) ([28752f2](https://github.com/feast-dev/feast/commit/28752f23a365716d98b9266d449ee0aa0572165f)) + +# [0.20.0](https://github.com/feast-dev/feast/compare/v0.19.0...v0.20.0) (2022-04-14) + + +### Bug Fixes + +* Add inlined data sources to the top level registry ([#2456](https://github.com/feast-dev/feast/issues/2456)) ([356788a](https://github.com/feast-dev/feast/commit/356788a4d3c3677f60512b3e841498245289d9a2)) +* Add new value types to types.ts for web ui 
([#2463](https://github.com/feast-dev/feast/issues/2463)) ([ad5694e](https://github.com/feast-dev/feast/commit/ad5694e601559ca1d8f8d3c91307c3c2cae1c69a)) +* Add PushSource proto and Python class ([#2428](https://github.com/feast-dev/feast/issues/2428)) ([9a4bd63](https://github.com/feast-dev/feast/commit/9a4bd63f62b54b76148c069c7665a3067553c456)) +* Add spark to lambda dockerfile ([#2480](https://github.com/feast-dev/feast/issues/2480)) ([514666f](https://github.com/feast-dev/feast/commit/514666f31cf6456dee59a5db750d8bd4ec63d8be)) +* Added private_key auth for Snowflake ([#2508](https://github.com/feast-dev/feast/issues/2508)) ([c42c9b0](https://github.com/feast-dev/feast/commit/c42c9b0f2a962e428d9af0d3e7ba080a62d77008)) +* Added Redshift and Spark typecheck to data_source event_timestamp_col inference ([#2389](https://github.com/feast-dev/feast/issues/2389)) ([04dea73](https://github.com/feast-dev/feast/commit/04dea732a495dfc8fa3dd006c89f73c6d08097c5)) +* Building of go extension fails ([#2448](https://github.com/feast-dev/feast/issues/2448)) ([7d1efd5](https://github.com/feast-dev/feast/commit/7d1efd56387b4a8c373c69f062ea31214f33c5f1)) +* Bump the number of versions bumps expected to 27 ([#2549](https://github.com/feast-dev/feast/issues/2549)) ([ecc9938](https://github.com/feast-dev/feast/commit/ecc9938774e9446f54484272bb3677e8d2fc1682)) +* Create __init__ files for the proto-generated python dirs ([#2410](https://github.com/feast-dev/feast/issues/2410)) ([e17028d](https://github.com/feast-dev/feast/commit/e17028dbb90cb88d9df9a9769abbd90c5c301563)) +* Don't prevent apply from running given duplicate empty names in data sources. Also fix repeated apply of Spark data source. ([#2415](https://github.com/feast-dev/feast/issues/2415)) ([b95f441](https://github.com/feast-dev/feast/commit/b95f4410ee91069ff84e81d2d5f3e9329edc8626)) +* Dynamodb deduplicate batch write request by partition keys ([#2515](https://github.com/feast-dev/feast/issues/2515)) ([70d4a13](https://github.com/feast-dev/feast/commit/70d4a1335312747521fc57e7742abf7eb85c12a6)) +* Ensure that __init__ files exist in proto dirs ([#2433](https://github.com/feast-dev/feast/issues/2433)) ([9b94f7b](https://github.com/feast-dev/feast/commit/9b94f7b35221af0bf1f813ab47e2f52e37a79ca6)) +* Fix DataSource constructor to unbreak custom data sources ([#2492](https://github.com/feast-dev/feast/issues/2492)) ([712653e](https://github.com/feast-dev/feast/commit/712653e91cc4dc51174058a850f00a471c7269b9)) +* Fix default feast apply path without any extras ([#2373](https://github.com/feast-dev/feast/issues/2373)) ([6ba7fc7](https://github.com/feast-dev/feast/commit/6ba7fc789da34f0d239d7cae1b3a95e14eae9e2f)) +* Fix definitions.py with new definition ([#2541](https://github.com/feast-dev/feast/issues/2541)) ([eefc34a](https://github.com/feast-dev/feast/commit/eefc34a5da978ccc686bbc2586b4af5de3917ff1)) +* Fix entity row to use join key instead of name ([#2521](https://github.com/feast-dev/feast/issues/2521)) ([c22fa2c](https://github.com/feast-dev/feast/commit/c22fa2c42c2c2b6b84c1876f12d690806ea8d431)) +* Fix Java Master ([#2499](https://github.com/feast-dev/feast/issues/2499)) ([e083458](https://github.com/feast-dev/feast/commit/e0834587ec1450026a17a7c18f2708d44410da07)) +* Fix registry proto ([#2435](https://github.com/feast-dev/feast/issues/2435)) ([ea6a9b2](https://github.com/feast-dev/feast/commit/ea6a9b2034c35bf36ee5073fad93dde52279ebcd)) +* Fix some inconsistencies in the docs and comments in the code 
([#2444](https://github.com/feast-dev/feast/issues/2444)) ([ad008bf](https://github.com/feast-dev/feast/commit/ad008bfd1d826a72247accf993a58505a9e2b308)) +* Fix spark docs ([#2382](https://github.com/feast-dev/feast/issues/2382)) ([d4a606a](https://github.com/feast-dev/feast/commit/d4a606ad68fedeb12839038a956043a66e6f518f)) +* Fix Spark template to work correctly on feast init -t spark ([#2393](https://github.com/feast-dev/feast/issues/2393)) ([ae133fd](https://github.com/feast-dev/feast/commit/ae133fd3ee55da3ff2fb2848456de527935ee94f)) +* Fix the feature repo fixture used by java tests ([#2469](https://github.com/feast-dev/feast/issues/2469)) ([32e925e](https://github.com/feast-dev/feast/commit/32e925e5edebef98449e295e2b1cf0f98ceb8d77)) +* Fix unhashable Snowflake and Redshift sources ([cd8f1c9](https://github.com/feast-dev/feast/commit/cd8f1c9d3a07231343631692b6de67e7f5c93b8f)) +* Fixed bug in passing config file params to snowflake python connector ([#2503](https://github.com/feast-dev/feast/issues/2503)) ([34f2b59](https://github.com/feast-dev/feast/commit/34f2b591bc8de3af59dfd0031ff803b4fe028abd)) +* Fixing Spark template to include source name ([#2381](https://github.com/feast-dev/feast/issues/2381)) ([a985f1d](https://github.com/feast-dev/feast/commit/a985f1d310915bafba3074f9cea776232b956e29)) +* Make name a keyword arg for the Entity class ([#2467](https://github.com/feast-dev/feast/issues/2467)) ([43847de](https://github.com/feast-dev/feast/commit/43847dea74b25bb2081d76b1fd5e43da8cbc0781)) +* Making a name for data sources not a breaking change ([#2379](https://github.com/feast-dev/feast/issues/2379)) ([71d7ae2](https://github.com/feast-dev/feast/commit/71d7ae26f0357646a15a41f7c2b53a0cab1b7c12)) +* Minor link fix in `CONTRIBUTING.md` ([#2481](https://github.com/feast-dev/feast/issues/2481)) ([2917e27](https://github.com/feast-dev/feast/commit/2917e272ab538955d564552a56ef36bb3f75f646)) +* Preserve ordering of features in _get_column_names ([#2457](https://github.com/feast-dev/feast/issues/2457)) ([495b435](https://github.com/feast-dev/feast/commit/495b4353051d9d48886407f2ca7de19c3eb458d7)) +* Relax click python requirement to >=7 ([#2450](https://github.com/feast-dev/feast/issues/2450)) ([f202f92](https://github.com/feast-dev/feast/commit/f202f928ff02f4a263f3b4ce450051e526304c51)) +* Remove date partition column field from datasources that don't s… ([#2478](https://github.com/feast-dev/feast/issues/2478)) ([ce35835](https://github.com/feast-dev/feast/commit/ce35835413d4f5bc174eb177e277be96e8bb9673)) +* Remove docker step from unit test workflow ([#2535](https://github.com/feast-dev/feast/issues/2535)) ([6f22f22](https://github.com/feast-dev/feast/commit/6f22f227a22bea6f9ef171764a4c5e3d74b310ea)) +* Remove spark from the AWS Lambda dockerfile ([#2498](https://github.com/feast-dev/feast/issues/2498)) ([6abae16](https://github.com/feast-dev/feast/commit/6abae16803ef4644c91822626820e99b2f7cfbbd)) +* Request data api update ([#2488](https://github.com/feast-dev/feast/issues/2488)) ([0c9e5b7](https://github.com/feast-dev/feast/commit/0c9e5b7e2132b619056e9b41519d54a93e977f6c)) +* Schema update ([#2509](https://github.com/feast-dev/feast/issues/2509)) ([cf7bbc2](https://github.com/feast-dev/feast/commit/cf7bbc2226e2c3d652df1ddfc411a1feadda26c4)) +* Simplify DataSource.from_proto logic ([#2424](https://github.com/feast-dev/feast/issues/2424)) ([6bda4d2](https://github.com/feast-dev/feast/commit/6bda4d2b2080e5a97333e14863f2235aed989661)) +* Snowflake api update 
([#2487](https://github.com/feast-dev/feast/issues/2487)) ([1181a9e](https://github.com/feast-dev/feast/commit/1181a9e62335ee00ee54cd1bc88baa6297238988)) +* Support passing batch source to streaming sources for backfills ([#2523](https://github.com/feast-dev/feast/issues/2523)) ([90db1d1](https://github.com/feast-dev/feast/commit/90db1d1bb079a76a6a26a89a9c70b9f80bb8b08a)) +* Timestamp update ([#2486](https://github.com/feast-dev/feast/issues/2486)) ([bf23111](https://github.com/feast-dev/feast/commit/bf23111acedf0b5180e368e1cffbdeb0da5a8f30)) +* Typos in Feast UI error message ([#2432](https://github.com/feast-dev/feast/issues/2432)) ([e14369d](https://github.com/feast-dev/feast/commit/e14369def00c116e05c91475c29fa32ab2fa8aeb)) +* Update feature view APIs to prefer keyword args ([#2472](https://github.com/feast-dev/feast/issues/2472)) ([7c19cf7](https://github.com/feast-dev/feast/commit/7c19cf7fe3e42674a357e54a6576c772a7bcb107)) +* Update file api ([#2470](https://github.com/feast-dev/feast/issues/2470)) ([83a11c6](https://github.com/feast-dev/feast/commit/83a11c6b3af3b78653fe612989a5f5227cb773c1)) +* Update Makefile to cd into python dir before running commands ([#2437](https://github.com/feast-dev/feast/issues/2437)) ([ca32155](https://github.com/feast-dev/feast/commit/ca3215502bc4c92d0635efaceae22acc914c73a8)) +* Update redshift api ([#2479](https://github.com/feast-dev/feast/issues/2479)) ([4fa73a9](https://github.com/feast-dev/feast/commit/4fa73a9c1fc5f003a5662e497d77e69efdbae6c1)) +* Update some fields optional in UI parser ([#2380](https://github.com/feast-dev/feast/issues/2380)) ([cff7ac3](https://github.com/feast-dev/feast/commit/cff7ac3c6440a78a7fa460b2f19bc07d9ea5aab2)) +* Use a single version of jackson libraries and upgrade to 2.12.6.1 ([#2473](https://github.com/feast-dev/feast/issues/2473)) ([5be1cc6](https://github.com/feast-dev/feast/commit/5be1cc6768c123b592c26099d12f3469f575a52c)) +* Use dateutil parser to parse materialization times ([#2464](https://github.com/feast-dev/feast/issues/2464)) ([6c55e49](https://github.com/feast-dev/feast/commit/6c55e49898e9b5f5aa77fe87ac0807984937a708)) +* Use the correct dockerhub image tag when building feature servers ([#2372](https://github.com/feast-dev/feast/issues/2372)) ([0d62c1d](https://github.com/feast-dev/feast/commit/0d62c1d34b02cd67e13e545d081b90fe1562261e)) + + +### Features + +* Add `/write-to-online-store` method to the python feature server ([#2423](https://github.com/feast-dev/feast/issues/2423)) ([d2fb048](https://github.com/feast-dev/feast/commit/d2fb0487134f011c1a0f217f1a325e3bf6a52d27)) +* Add description, tags, owner fields to all feature view classes ([#2440](https://github.com/feast-dev/feast/issues/2440)) ([ed5e928](https://github.com/feast-dev/feast/commit/ed5e9282678c943c750c5e9d84037376dd1380d3)) +* Add DQM Logging on GRPC Server with FileLogStorage for Testing ([#2403](https://github.com/feast-dev/feast/issues/2403)) ([57a97d8](https://github.com/feast-dev/feast/commit/57a97d8e207e38876901b9a6b1d6e0f7cc6ce43d)) +* Add Feast types in preparation for changing type system ([#2475](https://github.com/feast-dev/feast/issues/2475)) ([4864252](https://github.com/feast-dev/feast/commit/4864252db4e8ef6effb596c689bd17ec884ebffe)) +* Add Field class ([#2500](https://github.com/feast-dev/feast/issues/2500)) ([1279612](https://github.com/feast-dev/feast/commit/1279612fe64d8d24fa57d873f4a617095adc5feb)) +* Add support for DynamoDB online_read in batches ([#2371](https://github.com/feast-dev/feast/issues/2371)) 
([702ec49](https://github.com/feast-dev/feast/commit/702ec498eff2fb079d5403e9471b73ae840d6d7f)) +* Add Support for DynamodbOnlineStoreConfig endpoint_url parameter ([#2485](https://github.com/feast-dev/feast/issues/2485)) ([7b863d1](https://github.com/feast-dev/feast/commit/7b863d14e8442007b1403e467130b974b3aba078)) +* Add templating for dynamodb table name ([#2394](https://github.com/feast-dev/feast/issues/2394)) ([f591088](https://github.com/feast-dev/feast/commit/f59108806c6819bcd42828244a2abc66471cd8f8)) +* Allow local feature server to use Go feature server if enabled ([#2538](https://github.com/feast-dev/feast/issues/2538)) ([a2ef375](https://github.com/feast-dev/feast/commit/a2ef3755766a7224298f984a2cadb17829b2f3d2)) +* Allow using entity's join_key in get_online_features ([#2420](https://github.com/feast-dev/feast/issues/2420)) ([068c765](https://github.com/feast-dev/feast/commit/068c765ccf5f984bc0a73a8354711f39a5c17da7)) +* Data Source Api Update ([#2468](https://github.com/feast-dev/feast/issues/2468)) ([6b96b21](https://github.com/feast-dev/feast/commit/6b96b21a32cc6dc3b44b48289711a66b2c33512d)) +* Go server ([#2339](https://github.com/feast-dev/feast/issues/2339)) ([d12e7ef](https://github.com/feast-dev/feast/commit/d12e7ef3b9c79cae30a2401c9ae6c0ac783c4b6b)), closes [#2354](https://github.com/feast-dev/feast/issues/2354) [#2361](https://github.com/feast-dev/feast/issues/2361) [#2332](https://github.com/feast-dev/feast/issues/2332) [#2356](https://github.com/feast-dev/feast/issues/2356) [#2363](https://github.com/feast-dev/feast/issues/2363) [#2349](https://github.com/feast-dev/feast/issues/2349) [#2355](https://github.com/feast-dev/feast/issues/2355) [#2336](https://github.com/feast-dev/feast/issues/2336) [#2361](https://github.com/feast-dev/feast/issues/2361) [#2363](https://github.com/feast-dev/feast/issues/2363) [#2344](https://github.com/feast-dev/feast/issues/2344) [#2354](https://github.com/feast-dev/feast/issues/2354) [#2347](https://github.com/feast-dev/feast/issues/2347) [#2350](https://github.com/feast-dev/feast/issues/2350) [#2356](https://github.com/feast-dev/feast/issues/2356) [#2355](https://github.com/feast-dev/feast/issues/2355) [#2349](https://github.com/feast-dev/feast/issues/2349) [#2352](https://github.com/feast-dev/feast/issues/2352) [#2341](https://github.com/feast-dev/feast/issues/2341) [#2336](https://github.com/feast-dev/feast/issues/2336) [#2373](https://github.com/feast-dev/feast/issues/2373) [#2315](https://github.com/feast-dev/feast/issues/2315) [#2372](https://github.com/feast-dev/feast/issues/2372) [#2332](https://github.com/feast-dev/feast/issues/2332) [#2349](https://github.com/feast-dev/feast/issues/2349) [#2336](https://github.com/feast-dev/feast/issues/2336) [#2361](https://github.com/feast-dev/feast/issues/2361) [#2363](https://github.com/feast-dev/feast/issues/2363) [#2344](https://github.com/feast-dev/feast/issues/2344) [#2354](https://github.com/feast-dev/feast/issues/2354) [#2347](https://github.com/feast-dev/feast/issues/2347) [#2350](https://github.com/feast-dev/feast/issues/2350) [#2356](https://github.com/feast-dev/feast/issues/2356) [#2355](https://github.com/feast-dev/feast/issues/2355) [#2349](https://github.com/feast-dev/feast/issues/2349) [#2352](https://github.com/feast-dev/feast/issues/2352) [#2341](https://github.com/feast-dev/feast/issues/2341) [#2336](https://github.com/feast-dev/feast/issues/2336) [#2373](https://github.com/feast-dev/feast/issues/2373) [#2379](https://github.com/feast-dev/feast/issues/2379) 
[#2380](https://github.com/feast-dev/feast/issues/2380) [#2382](https://github.com/feast-dev/feast/issues/2382) [#2364](https://github.com/feast-dev/feast/issues/2364) [#2366](https://github.com/feast-dev/feast/issues/2366) [#2386](https://github.com/feast-dev/feast/issues/2386) +* Graduate write_to_online_store out of experimental status ([#2426](https://github.com/feast-dev/feast/issues/2426)) ([e7dd4b7](https://github.com/feast-dev/feast/commit/e7dd4b75ba0fbd86338aacf2ecd0cc8979dc803b)) +* Make feast PEP 561 compliant ([#2405](https://github.com/feast-dev/feast/issues/2405)) ([3c41f94](https://github.com/feast-dev/feast/commit/3c41f944c68fb6687389c0b154f7297941a1f398)), closes [#2420](https://github.com/feast-dev/feast/issues/2420) [#2418](https://github.com/feast-dev/feast/issues/2418) [#2425](https://github.com/feast-dev/feast/issues/2425) [#2426](https://github.com/feast-dev/feast/issues/2426) [#2427](https://github.com/feast-dev/feast/issues/2427) [#2431](https://github.com/feast-dev/feast/issues/2431) [#2433](https://github.com/feast-dev/feast/issues/2433) [#2420](https://github.com/feast-dev/feast/issues/2420) [#2418](https://github.com/feast-dev/feast/issues/2418) [#2425](https://github.com/feast-dev/feast/issues/2425) [#2426](https://github.com/feast-dev/feast/issues/2426) [#2427](https://github.com/feast-dev/feast/issues/2427) [#2431](https://github.com/feast-dev/feast/issues/2431) [#2433](https://github.com/feast-dev/feast/issues/2433) +* Makefile for contrib for Issue [#2364](https://github.com/feast-dev/feast/issues/2364) ([#2366](https://github.com/feast-dev/feast/issues/2366)) ([a02325b](https://github.com/feast-dev/feast/commit/a02325b20f4d1a949ebb1f26bd3b65a22e3ea4f3)) +* Support on demand feature views in go feature server ([#2494](https://github.com/feast-dev/feast/issues/2494)) ([6edd274](https://github.com/feast-dev/feast/commit/6edd274261689b8c67df31d598aa5d5b14dcf5f7)) +* Switch from Feature to Field ([#2514](https://github.com/feast-dev/feast/issues/2514)) ([6a03bed](https://github.com/feast-dev/feast/commit/6a03bed82bf408d2f46d209be7ac9524b4ea6dcd)) +* Use a daemon thread to monitor the go feature server exclusively ([#2391](https://github.com/feast-dev/feast/issues/2391)) ([0bb5e8c](https://github.com/feast-dev/feast/commit/0bb5e8c5a91f6f986f879f965b84e987e71a9d88)) + +# [0.19.0](https://github.com/feast-dev/feast/compare/v0.18.0...v0.19.0) (2022-03-05) + + +### Bug Fixes + +* Added additional value types to UI parser and removed references to registry-bq.json ([#2361](https://github.com/feast-dev/feast/issues/2361)) ([d202d51](https://github.com/feast-dev/feast/commit/d202d5170b7e6bf1e1b0f103aac247bfc04c2760)) +* Fix Redshift bug that stops waiting on statements after 5 minutes ([#2363](https://github.com/feast-dev/feast/issues/2363)) ([74f887f](https://github.com/feast-dev/feast/commit/74f887f72e109147a2e6214c2e3baade63d55fbe)) +* Method _should_use_plan only returns true for local sqlite provider ([#2344](https://github.com/feast-dev/feast/issues/2344)) ([fdb5f21](https://github.com/feast-dev/feast/commit/fdb5f215fe7f4946a6c88cfb34dc0c24eb5ac733)) +* Remove redis service to prevent more conflicts and add redis node to master_only ([#2354](https://github.com/feast-dev/feast/issues/2354)) ([993616f](https://github.com/feast-dev/feast/commit/993616fe152d097e2d918e5f56fc13e2e409175c)) +* Rollback Redis-py to Redis-py-cluster ([#2347](https://github.com/feast-dev/feast/issues/2347)) 
([1ba86fb](https://github.com/feast-dev/feast/commit/1ba86fb0cc7f2e86b8c70477462faa68075f99cd)) +* Update github workflow to prevent redis from overlapping ports. ([#2350](https://github.com/feast-dev/feast/issues/2350)) ([c2a6c6c](https://github.com/feast-dev/feast/commit/c2a6c6cc34d64fd3483f0b2cef394f833e9124e2)) + + +### Features + +* Add owner field to Entity and rename labels to tags ([412d625](https://github.com/feast-dev/feast/commit/412d62516da184350de473c87dd2ab369ad4cf86)) +* Allow all snowflake python connector connection methods to be available to Feast ([#2356](https://github.com/feast-dev/feast/issues/2356)) ([ec7385c](https://github.com/feast-dev/feast/commit/ec7385c16f6899c5388b7a786fce6e0d543776f5)) +* Allowing password based authentication and SSL for Redis in Java feature server ([0af8adb](https://github.com/feast-dev/feast/commit/0af8adba1c56a2a29d02c2fc5aeccf51ee78fd5f)) +* Event timestamps response ([#2355](https://github.com/feast-dev/feast/issues/2355)) ([5481caf](https://github.com/feast-dev/feast/commit/5481caf37989c347bf4469f5f081f4f15f20fdb7)) +* Feast Spark Offline Store ([#2349](https://github.com/feast-dev/feast/issues/2349)) ([98b8d8d](https://github.com/feast-dev/feast/commit/98b8d8da7e8e717ec93197f253368551951ca675)) +* Initial merge of Web UI logic ([#2352](https://github.com/feast-dev/feast/issues/2352)) ([ce3bc59](https://github.com/feast-dev/feast/commit/ce3bc598501949f82197c7fa2409319282197276)) +* Key ttl setting for redis online store ([#2341](https://github.com/feast-dev/feast/issues/2341)) ([236a108](https://github.com/feast-dev/feast/commit/236a108c87aed106e0a46e48172d31dc94ed9c2b)) +* Metadata changes & making data sources top level objects to power Feast UI ([#2336](https://github.com/feast-dev/feast/issues/2336)) ([43da230](https://github.com/feast-dev/feast/commit/43da2302dfcbf3b5e56ed068021b5821d544c05f)) + +# [v0.18.1](https://github.com/feast-dev/feast/tree/v0.18.1) (2022-02-15) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.18.0...v0.18.1) + +### Bug Fixes + +- ODFVs raise a PerformanceWarning for very large sets of features [\#2293](https://github.com/feast-dev/feast/issues/2293) +- Don't require `snowflake` to always be installed [\#2309](https://github.com/feast-dev/feast/pull/2309) ([judahrand](https://github.com/judahrand)) +- podAnnotations Values in the feature-server chart [\#2304](https://github.com/feast-dev/feast/pull/2304) ([tpvasconcelos](https://github.com/tpvasconcelos)) +- Fixing the Java helm charts and adding a demo tutorial on how to use them [\#2298](https://github.com/feast-dev/feast/pull/2298) ([adchia](https://github.com/adchia)) +- avoid using transactions on OSS Redis [\#2296](https://github.com/feast-dev/feast/pull/2296) ([DvirDukhan](https://github.com/DvirDukhan)) +- Include infra objects in registry dump and fix Infra's from\_proto [\#2295](https://github.com/feast-dev/feast/pull/2295) ([adchia](https://github.com/adchia)) +- Expose snowflake credentials for unit testing [\#2288](https://github.com/feast-dev/feast/pull/2288) ([sfc-gh-madkins](https://github.com/sfc-gh-madkins)) +- Fix flaky tests \(test\_online\_store\_cleanup & test\_feature\_get\_online\_features\_types\_match\) [\#2276](https://github.com/feast-dev/feast/pull/2276) ([pyalex](https://github.com/pyalex)) + +### Merged Pull Requests + +- Remove old flag warning with the python feature server [\#2300](https://github.com/feast-dev/feast/pull/2300) ([adchia](https://github.com/adchia)) +- Use an OFFLINE schema for Snowflake 
offline store tests [\#2291](https://github.com/feast-dev/feast/pull/2291) ([sfc-gh-madkins](https://github.com/sfc-gh-madkins)) +- fix typos in markdown files [\#2289](https://github.com/feast-dev/feast/pull/2289) ([charliec443](https://github.com/charliec443)) +- Add -SNAPSHOT suffix to pom.xml version [\#2286](https://github.com/feast-dev/feast/pull/2286) ([tsotnet](https://github.com/tsotnet)) +- Update CONTRIBUTING.md [\#2282](https://github.com/feast-dev/feast/pull/2282) ([adchia](https://github.com/adchia)) + +# [v0.18.0](https://github.com/feast-dev/feast/tree/v0.18.0) (2022-02-05) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.17.0...v0.18.0) + +### Features + +- Tutorial on validation of historical features [\#2277](https://github.com/feast-dev/feast/pull/2277) ([pyalex](https://github.com/pyalex)) +- Feast plan clean up [\#2256](https://github.com/feast-dev/feast/pull/2256) ([felixwang9817](https://github.com/felixwang9817)) +- Return `UNIX\_TIMESTAMP` as Python `datetime` [\#2244](https://github.com/feast-dev/feast/pull/2244) ([judahrand](https://github.com/judahrand)) +- Validating historical features against reference dataset with "great expectations" profiler [\#2243](https://github.com/feast-dev/feast/pull/2243) ([pyalex](https://github.com/pyalex)) +- Implement feature\_store.\_apply\_diffs to handle registry and infra diffs [\#2238](https://github.com/feast-dev/feast/pull/2238) ([felixwang9817](https://github.com/felixwang9817)) +- Compare Python objects instead of proto objects [\#2227](https://github.com/feast-dev/feast/pull/2227) ([felixwang9817](https://github.com/felixwang9817)) +- Modify feature\_store.plan to produce an InfraDiff [\#2211](https://github.com/feast-dev/feast/pull/2211) ([felixwang9817](https://github.com/felixwang9817)) +- Implement diff\_infra\_protos method for feast plan [\#2204](https://github.com/feast-dev/feast/pull/2204) ([felixwang9817](https://github.com/felixwang9817)) +- Persisting results of historical retrieval [\#2197](https://github.com/feast-dev/feast/pull/2197) ([pyalex](https://github.com/pyalex)) +- Merge feast-snowflake plugin into main repo with documentation [\#2193](https://github.com/feast-dev/feast/pull/2193) ([sfc-gh-madkins](https://github.com/sfc-gh-madkins)) +- Add InfraDiff class for feast plan [\#2190](https://github.com/feast-dev/feast/pull/2190) ([felixwang9817](https://github.com/felixwang9817)) +- Use FeatureViewProjection instead of FeatureView in ODFV [\#2186](https://github.com/feast-dev/feast/pull/2186) ([judahrand](https://github.com/judahrand)) + +### Bug Fixes + +- Set `created\_timestamp` and `last\_updated\_timestamp` fields [\#2266](https://github.com/feast-dev/feast/pull/2266) ([judahrand](https://github.com/judahrand)) +- Use `datetime.utcnow\(\)` to avoid timezone issues [\#2265](https://github.com/feast-dev/feast/pull/2265) ([judahrand](https://github.com/judahrand)) +- Fix Redis key serialization in java feature server [\#2264](https://github.com/feast-dev/feast/pull/2264) ([pyalex](https://github.com/pyalex)) +- modify registry.db s3 object initialization to work in S3 subdirectory with Java Feast Server [\#2259](https://github.com/feast-dev/feast/pull/2259) ([NalinGHub](https://github.com/NalinGHub)) +- Add snowflake environment variables to allow testing on snowflake infra [\#2258](https://github.com/feast-dev/feast/pull/2258) ([sfc-gh-madkins](https://github.com/sfc-gh-madkins)) +- Correct inconsistent dependency [\#2255](https://github.com/feast-dev/feast/pull/2255) 
([judahrand](https://github.com/judahrand)) +- Fix for historical field mappings [\#2252](https://github.com/feast-dev/feast/pull/2252) ([michelle-rascati-sp](https://github.com/michelle-rascati-sp)) +- Add backticks to left\_table\_query\_string [\#2250](https://github.com/feast-dev/feast/pull/2250) ([dmille](https://github.com/dmille)) +- Fix inference of BigQuery ARRAY types. [\#2245](https://github.com/feast-dev/feast/pull/2245) ([judahrand](https://github.com/judahrand)) +- Fix Redshift data creator [\#2242](https://github.com/feast-dev/feast/pull/2242) ([felixwang9817](https://github.com/felixwang9817)) +- Delete entity key from Redis only when all attached feature views are gone [\#2240](https://github.com/feast-dev/feast/pull/2240) ([pyalex](https://github.com/pyalex)) +- Tests for transformation service integration in java feature server [\#2236](https://github.com/feast-dev/feast/pull/2236) ([pyalex](https://github.com/pyalex)) +- Feature server helm chart produces invalid YAML [\#2234](https://github.com/feast-dev/feast/pull/2234) ([pyalex](https://github.com/pyalex)) +- Docker build fails for java feature server [\#2230](https://github.com/feast-dev/feast/pull/2230) ([pyalex](https://github.com/pyalex)) +- Fix ValueType.UNIX\_TIMESTAMP conversions [\#2219](https://github.com/feast-dev/feast/pull/2219) ([judahrand](https://github.com/judahrand)) +- Add on demand feature views deletion [\#2203](https://github.com/feast-dev/feast/pull/2203) ([corentinmarek](https://github.com/corentinmarek)) +- Compare only specs in integration tests [\#2200](https://github.com/feast-dev/feast/pull/2200) ([felixwang9817](https://github.com/felixwang9817)) +- Bump log4j-core from 2.17.0 to 2.17.1 in /java [\#2189](https://github.com/feast-dev/feast/pull/2189) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Support multiple application properties files \(incl from classpath\) [\#2187](https://github.com/feast-dev/feast/pull/2187) ([pyalex](https://github.com/pyalex)) +- Avoid requesting features from OnlineStore twice [\#2185](https://github.com/feast-dev/feast/pull/2185) ([judahrand](https://github.com/judahrand)) +- Speed up Datastore deletes by batch deletions with multithreading [\#2182](https://github.com/feast-dev/feast/pull/2182) ([ptoman-pa](https://github.com/ptoman-pa)) +- Fixes large payload runtime exception in Datastore \(issue 1633\) [\#2181](https://github.com/feast-dev/feast/pull/2181) ([ptoman-pa](https://github.com/ptoman-pa)) + +### Merged Pull Requests + +- Add link to community plugin for Spark offline store [\#2279](https://github.com/feast-dev/feast/pull/2279) ([adchia](https://github.com/adchia)) +- Fix broken links on documentation [\#2278](https://github.com/feast-dev/feast/pull/2278) ([adchia](https://github.com/adchia)) +- Publish alternative python package with FEAST\_USAGE=False by default [\#2275](https://github.com/feast-dev/feast/pull/2275) ([pyalex](https://github.com/pyalex)) +- Unify all helm charts versions [\#2274](https://github.com/feast-dev/feast/pull/2274) ([pyalex](https://github.com/pyalex)) +- Fix / update helm chart workflows to push the feast python server [\#2273](https://github.com/feast-dev/feast/pull/2273) ([adchia](https://github.com/adchia)) +- Update Feast Serving documentation with ways to run and debug locally [\#2272](https://github.com/feast-dev/feast/pull/2272) ([adchia](https://github.com/adchia)) +- Fix Snowflake docs [\#2270](https://github.com/feast-dev/feast/pull/2270) ([felixwang9817](https://github.com/felixwang9817)) +- 
Update local-feature-server.md [\#2269](https://github.com/feast-dev/feast/pull/2269) ([tsotnet](https://github.com/tsotnet)) +- Update docs to include Snowflake/DQM and removing unused docs from old versions of Feast [\#2268](https://github.com/feast-dev/feast/pull/2268) ([adchia](https://github.com/adchia)) +- Graduate Python feature server [\#2263](https://github.com/feast-dev/feast/pull/2263) ([felixwang9817](https://github.com/felixwang9817)) +- Fix benchmark tests at HEAD by passing in Snowflake secrets [\#2262](https://github.com/feast-dev/feast/pull/2262) ([adchia](https://github.com/adchia)) +- Refactor `pa\_to\_feast\_value\_type` [\#2246](https://github.com/feast-dev/feast/pull/2246) ([judahrand](https://github.com/judahrand)) +- Allow using pandas.StringDtype to support on-demand features with STRING type [\#2229](https://github.com/feast-dev/feast/pull/2229) ([pyalex](https://github.com/pyalex)) +- Bump jackson-databind from 2.10.1 to 2.10.5.1 in /java/common [\#2228](https://github.com/feast-dev/feast/pull/2228) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Split apply total parse repo [\#2226](https://github.com/feast-dev/feast/pull/2226) ([mickey-liu](https://github.com/mickey-liu)) +- Publish renamed java packages to maven central \(via Sonatype\) [\#2225](https://github.com/feast-dev/feast/pull/2225) ([pyalex](https://github.com/pyalex)) +- Make online store nullable [\#2224](https://github.com/feast-dev/feast/pull/2224) ([mirayyuce](https://github.com/mirayyuce)) +- Optimize `\_populate\_result\_rows\_from\_feature\_view` [\#2223](https://github.com/feast-dev/feast/pull/2223) ([judahrand](https://github.com/judahrand)) +- Update to newer `redis-py` [\#2221](https://github.com/feast-dev/feast/pull/2221) ([judahrand](https://github.com/judahrand)) +- Adding a local feature server test [\#2217](https://github.com/feast-dev/feast/pull/2217) ([adchia](https://github.com/adchia)) +- replace GetOnlineFeaturesResponse with GetOnlineFeaturesResponseV2 in… [\#2214](https://github.com/feast-dev/feast/pull/2214) ([tsotnet](https://github.com/tsotnet)) +- Updates to click==8.\* [\#2210](https://github.com/feast-dev/feast/pull/2210) ([diogommartins](https://github.com/diogommartins)) +- Bump protobuf-java from 3.12.2 to 3.16.1 in /java [\#2208](https://github.com/feast-dev/feast/pull/2208) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Add default priority for bug reports [\#2207](https://github.com/feast-dev/feast/pull/2207) ([adchia](https://github.com/adchia)) +- Modify issue templates to automatically attach labels [\#2205](https://github.com/feast-dev/feast/pull/2205) ([adchia](https://github.com/adchia)) +- Python FeatureServer optimization [\#2202](https://github.com/feast-dev/feast/pull/2202) ([judahrand](https://github.com/judahrand)) +- Refactor all importer logic to belong in feast.importer [\#2199](https://github.com/feast-dev/feast/pull/2199) ([felixwang9817](https://github.com/felixwang9817)) +- Refactor `OnlineResponse.to\_dict\(\)` [\#2196](https://github.com/feast-dev/feast/pull/2196) ([judahrand](https://github.com/judahrand)) +- \[Java feature server\] Converge ServingService API to make Python and Java feature servers consistent [\#2166](https://github.com/feast-dev/feast/pull/2166) ([pyalex](https://github.com/pyalex)) +- Add a unit test for the tag\_proto\_objects method [\#2163](https://github.com/feast-dev/feast/pull/2163) ([achals](https://github.com/achals)) + + +# [v0.17.0](https://github.com/feast-dev/feast/tree/v0.17.0) 
(2021-12-31) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.16.1...v0.17.0) + +### Features + +- Add feast-python-server helm chart [\#2177](https://github.com/feast-dev/feast/pull/2177) ([michelle-rascati-sp](https://github.com/michelle-rascati-sp)) +- Add SqliteTable as an InfraObject [\#2157](https://github.com/feast-dev/feast/pull/2157) ([felixwang9817](https://github.com/felixwang9817)) +- Compute property-level diffs for repo objects [\#2156](https://github.com/feast-dev/feast/pull/2156) ([achals](https://github.com/achals)) +- Add a feast plan command, and have CLI output differentiates between created, deleted and unchanged objects [\#2147](https://github.com/feast-dev/feast/pull/2147) ([achals](https://github.com/achals)) +- Refactor tag methods to infer created, deleted, and kept repo objects [\#2142](https://github.com/feast-dev/feast/pull/2142) ([achals](https://github.com/achals)) +- Add DatastoreTable infra object [\#2140](https://github.com/feast-dev/feast/pull/2140) ([felixwang9817](https://github.com/felixwang9817)) +- Dynamodb infra object [\#2131](https://github.com/feast-dev/feast/pull/2131) ([felixwang9817](https://github.com/felixwang9817)) +- Add Infra and InfraObjects classes [\#2125](https://github.com/feast-dev/feast/pull/2125) ([felixwang9817](https://github.com/felixwang9817)) +- Pre compute the timestamp range for feature views [\#2103](https://github.com/feast-dev/feast/pull/2103) ([judahrand](https://github.com/judahrand)) + +### Bug Fixes + +- Fix issues with java docker building [\#2178](https://github.com/feast-dev/feast/pull/2178) ([achals](https://github.com/achals)) +- unpin boto dependency in setup [\#2168](https://github.com/feast-dev/feast/pull/2168) ([fengyu05](https://github.com/fengyu05)) +- Fix issue with numpy datetimes in feast\_value\_type\_to\_pandas\_type [\#2167](https://github.com/feast-dev/feast/pull/2167) ([achals](https://github.com/achals)) +- Fix `BYTES` and `BYTES_LIST` type conversion [\#2158](https://github.com/feast-dev/feast/pull/2158) ([judahrand](https://github.com/judahrand)) +- Use correct name when deleting dynamo table [\#2154](https://github.com/feast-dev/feast/pull/2154) ([pyalex](https://github.com/pyalex)) +- Bump log4j-core from 2.15.0 to 2.16.0 in /java [\#2146](https://github.com/feast-dev/feast/pull/2146) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Bump log4j-api from 2.15.0 to 2.16.0 in /java [\#2145](https://github.com/feast-dev/feast/pull/2145) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Respect `full_feature_names` for ODFVs [\#2144](https://github.com/feast-dev/feast/pull/2144) ([judahrand](https://github.com/judahrand)) +- Cache dynamodb client and resource in DynamoDB online store implement… [\#2138](https://github.com/feast-dev/feast/pull/2138) ([felixwang9817](https://github.com/felixwang9817)) +- Bump log4j-api from 2.13.2 to 2.15.0 in /java [\#2133](https://github.com/feast-dev/feast/pull/2133) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Fix release workflow to use the new GCP action [\#2132](https://github.com/feast-dev/feast/pull/2132) ([adchia](https://github.com/adchia)) +- Remove spring-boot from the feast serving application [\#2127](https://github.com/feast-dev/feast/pull/2127) ([achals](https://github.com/achals)) +- Fix Makefile to properly create the ECR\_VERSION [\#2123](https://github.com/feast-dev/feast/pull/2123) ([adchia](https://github.com/adchia)) + +### Closes Issues + +- In GH workflow docker images are being built but not 
published [\#2152](https://github.com/feast-dev/feast/issues/2152) +- Any plan to make Feast 0.10+ support docker [\#2148](https://github.com/feast-dev/feast/issues/2148) +- ODFVs don't respect `full_feature_names` [\#2143](https://github.com/feast-dev/feast/issues/2143) +- Release workflow does not work [\#2136](https://github.com/feast-dev/feast/issues/2136) +- Redis Online Store - Truncate and Load [\#2129](https://github.com/feast-dev/feast/issues/2129) + +### Merged Pull Requests + +- Update roadmap to include Snowflake + Trino. Also fix docs + update FAQ [\#2175](https://github.com/feast-dev/feast/pull/2175) ([adchia](https://github.com/adchia)) +- Convert python values into proto values in bulk [\#2172](https://github.com/feast-dev/feast/pull/2172) ([pyalex](https://github.com/pyalex)) +- Push docker image after build in GH workflow [\#2171](https://github.com/feast-dev/feast/pull/2171) ([pyalex](https://github.com/pyalex)) +- Improve serialization performance [\#2165](https://github.com/feast-dev/feast/pull/2165) ([judahrand](https://github.com/judahrand)) +- Improve online deserialization latency [\#2164](https://github.com/feast-dev/feast/pull/2164) ([judahrand](https://github.com/judahrand)) +- Add a unit test for the tag\_proto\_objects method [\#2163](https://github.com/feast-dev/feast/pull/2163) ([achals](https://github.com/achals)) +- Bump log4j-core from 2.16.0 to 2.17.0 in /java [\#2161](https://github.com/feast-dev/feast/pull/2161) ([dependabot[bot]](https://github.com/apps/dependabot)) +- \[Java Feature Server\] Use hgetall in redis connector when number of retrieved fields is big enough [\#2159](https://github.com/feast-dev/feast/pull/2159) ([pyalex](https://github.com/pyalex)) +- Do not run benchmarks on pull requests [\#2155](https://github.com/feast-dev/feast/pull/2155) ([felixwang9817](https://github.com/felixwang9817)) +- Ensure that universal CLI test tears down infrastructure [\#2151](https://github.com/feast-dev/feast/pull/2151) ([felixwang9817](https://github.com/felixwang9817)) +- Remove underscores from ECR docker versions [\#2139](https://github.com/feast-dev/feast/pull/2139) ([achals](https://github.com/achals)) +- Run PR integration tests only on python 3.7 [\#2137](https://github.com/feast-dev/feast/pull/2137) ([achals](https://github.com/achals)) +- Update changelog for 0.16.1 and update helm charts [\#2135](https://github.com/feast-dev/feast/pull/2135) ([adchia](https://github.com/adchia)) +- Bump log4j-core from 2.13.2 to 2.15.0 in /java [\#2134](https://github.com/feast-dev/feast/pull/2134) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Updating lambda docker image to feature-server-python-aws [\#2130](https://github.com/feast-dev/feast/pull/2130) ([adchia](https://github.com/adchia)) +- Fix README to reflect new integration test suites [\#2124](https://github.com/feast-dev/feast/pull/2124) ([adchia](https://github.com/adchia)) +- Change the feast serve endpoint to be sync rather than async. 
[\#2119](https://github.com/feast-dev/feast/pull/2119) ([nossrannug](https://github.com/nossrannug)) +- Remove argument `feature_refs` [\#2115](https://github.com/feast-dev/feast/pull/2115) ([judahrand](https://github.com/judahrand)) +- Fix leaking dynamodb tables in integration tests [\#2104](https://github.com/feast-dev/feast/pull/2104) ([pyalex](https://github.com/pyalex)) +- Remove untested and undocumented interfaces [\#2084](https://github.com/feast-dev/feast/pull/2084) ([judahrand](https://github.com/judahrand)) +- Update creating-a-custom-provider.md [\#2070](https://github.com/feast-dev/feast/pull/2070) ([ChaitanyaKN](https://github.com/ChaitanyaKN)) +# [v0.16.1](https://github.com/feast-dev/feast/tree/v0.16.1) (2021-12-10) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.16.0...v0.16.1) + +### Bug Fixes + +- Bump log4j-api from 2.13.2 to 2.15.0 in /java [\#2133](https://github.com/feast-dev/feast/pull/2133) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Fix release workflow to use the new GCP action [\#2132](https://github.com/feast-dev/feast/pull/2132) ([adchia](https://github.com/adchia)) +- Fix Makefile to properly create the ECR\_VERSION [\#2123](https://github.com/feast-dev/feast/pull/2123) ([adchia](https://github.com/adchia)) + +### Merged Pull Requests + +- Updating lambda docker image to feature-server-python-aws [\#2130](https://github.com/feast-dev/feast/pull/2130) ([adchia](https://github.com/adchia)) +- Fix README to reflect new integration test suites [\#2124](https://github.com/feast-dev/feast/pull/2124) ([adchia](https://github.com/adchia)) +- Remove argument `feature_refs` [\#2115](https://github.com/feast-dev/feast/pull/2115) ([judahrand](https://github.com/judahrand)) + +# [v0.16.0](https://github.com/feast-dev/feast/tree/v0.16.0) (2021-12-08) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.15.1...v0.16.0) + +### Features + +- Install redis extra in AWS Lambda feature server & add hiredis depend… [\#2057](https://github.com/feast-dev/feast/pull/2057) ([tsotnet](https://github.com/tsotnet)) +- Support of GC and S3 storages for registry in Java Feature Server [\#2043](https://github.com/feast-dev/feast/pull/2043) ([pyalex](https://github.com/pyalex)) +- Adding stream ingestion alpha documentation [\#2005](https://github.com/feast-dev/feast/pull/2005) ([adchia](https://github.com/adchia)) + +### Bug Fixes + +- requested\_features are not passed to online\_read\(\) from passthrough\_provider [\#2106](https://github.com/feast-dev/feast/issues/2106) +- `feast apply` broken with 0.15.\* if the registry already exists [\#2086](https://github.com/feast-dev/feast/issues/2086) +- Inconsistent logic with `on_demand_feature_views` [\#2072](https://github.com/feast-dev/feast/issues/2072) +- Fix release workflow to pass the python version and docker build targets [\#2122](https://github.com/feast-dev/feast/pull/2122) ([adchia](https://github.com/adchia)) +- requested\_features is passed to online\_read from passthrough\_provider [\#2107](https://github.com/feast-dev/feast/pull/2107) ([aurobindoc](https://github.com/aurobindoc)) +- Don't materialize FeatureViews where `online is False` [\#2101](https://github.com/feast-dev/feast/pull/2101) ([judahrand](https://github.com/judahrand)) +- Have apply\_total use the repo\_config that's passed in as a parameter \(makes it more compatible with custom wrapper code\) [\#2099](https://github.com/feast-dev/feast/pull/2099) ([mavysavydav](https://github.com/mavysavydav)) +- Do not attempt to 
compute ODFVs when there are no ODFVs [\#2090](https://github.com/feast-dev/feast/pull/2090) ([felixwang9817](https://github.com/felixwang9817)) +- Duplicate feast apply bug [\#2087](https://github.com/feast-dev/feast/pull/2087) ([felixwang9817](https://github.com/felixwang9817)) +- Add --host as an option for feast serve [\#2078](https://github.com/feast-dev/feast/pull/2078) ([nossrannug](https://github.com/nossrannug)) +- Fix feature server docker image tag generation in pr integration tests [\#2077](https://github.com/feast-dev/feast/pull/2077) ([tsotnet](https://github.com/tsotnet)) +- Fix ECR Image build on master branch [\#2076](https://github.com/feast-dev/feast/pull/2076) ([tsotnet](https://github.com/tsotnet)) +- Optimize memory usage during materialization [\#2073](https://github.com/feast-dev/feast/pull/2073) ([judahrand](https://github.com/judahrand)) +- Fix unexpected feature view deletion when applying edited odfv [\#2054](https://github.com/feast-dev/feast/pull/2054) ([ArrichM](https://github.com/ArrichM)) +- Properly exclude entities from feature inference [\#2048](https://github.com/feast-dev/feast/pull/2048) ([mavysavydav](https://github.com/mavysavydav)) +- Don't allow FeatureStore.apply with commit=False [\#2047](https://github.com/feast-dev/feast/pull/2047) ([nossrannug](https://github.com/nossrannug)) +- Fix bug causing OnDemandFeatureView.infer\_features\(\) to fail when the… [\#2046](https://github.com/feast-dev/feast/pull/2046) ([ArrichM](https://github.com/ArrichM)) +- Add missing comma in setup.py [\#2031](https://github.com/feast-dev/feast/pull/2031) ([achals](https://github.com/achals)) +- Correct cleanup after usage e2e tests [\#2015](https://github.com/feast-dev/feast/pull/2015) ([pyalex](https://github.com/pyalex)) +- Change Environment timestamps to be in UTC [\#2007](https://github.com/feast-dev/feast/pull/2007) ([felixwang9817](https://github.com/felixwang9817)) +- get\_online\_features on demand transform bug fixes + local integration test mode [\#2004](https://github.com/feast-dev/feast/pull/2004) ([adchia](https://github.com/adchia)) +- Always pass full and partial feature names to ODFV [\#2003](https://github.com/feast-dev/feast/pull/2003) ([judahrand](https://github.com/judahrand)) +- ODFV UDFs should handle list types [\#2002](https://github.com/feast-dev/feast/pull/2002) ([Agent007](https://github.com/Agent007)) +- Update bq\_to\_feast\_value\_type with BOOLEAN type as a legacy sql data type [\#1996](https://github.com/feast-dev/feast/pull/1996) ([mavysavydav](https://github.com/mavysavydav)) +- Fix bug where using some Pandas dtypes in the output of an ODFV fails [\#1994](https://github.com/feast-dev/feast/pull/1994) ([judahrand](https://github.com/judahrand)) +- Fix duplicate update infra [\#1990](https://github.com/feast-dev/feast/pull/1990) ([felixwang9817](https://github.com/felixwang9817)) +- Improve performance of \_convert\_arrow\_to\_proto [\#1984](https://github.com/feast-dev/feast/pull/1984) ([nossrannug](https://github.com/nossrannug)) + +### Merged Pull Requests + +- Add changelog for v0.16.0 [\#2120](https://github.com/feast-dev/feast/pull/2120) ([adchia](https://github.com/adchia)) +- Update FAQ [\#2118](https://github.com/feast-dev/feast/pull/2118) ([felixwang9817](https://github.com/felixwang9817)) +- Move helm chart back to main repo [\#2113](https://github.com/feast-dev/feast/pull/2113) ([pyalex](https://github.com/pyalex)) +- Set package long description encoding to UTF-8 [\#2111](https://github.com/feast-dev/feast/pull/2111) 
([danilopeixoto](https://github.com/danilopeixoto)) +- Update release workflow to include new docker images [\#2108](https://github.com/feast-dev/feast/pull/2108) ([adchia](https://github.com/adchia)) +- Use the maintainers group in Codeowners instead of individuals [\#2102](https://github.com/feast-dev/feast/pull/2102) ([achals](https://github.com/achals)) +- Remove tfx schema from ValueType [\#2098](https://github.com/feast-dev/feast/pull/2098) ([pyalex](https://github.com/pyalex)) +- Add data source implementations to RTD docs [\#2097](https://github.com/feast-dev/feast/pull/2097) ([felixwang9817](https://github.com/felixwang9817)) +- Updated feature view documentation to include blurb about feature inferencing [\#2096](https://github.com/feast-dev/feast/pull/2096) ([mavysavydav](https://github.com/mavysavydav)) +- Fix integration test that is unstable due to incorrect materialization boundaries [\#2095](https://github.com/feast-dev/feast/pull/2095) ([pyalex](https://github.com/pyalex)) +- Broaden google-cloud-core dependency [\#2094](https://github.com/feast-dev/feast/pull/2094) ([ptoman-pa](https://github.com/ptoman-pa)) +- Use pip-tools to lock versions of dependent packages [\#2093](https://github.com/feast-dev/feast/pull/2093) ([ysk24ok](https://github.com/ysk24ok)) +- Fix typo in feature retrieval doc [\#2092](https://github.com/feast-dev/feast/pull/2092) ([olivierlabreche](https://github.com/olivierlabreche)) +- Fix typo in FeatureView example \(doc\) [\#2091](https://github.com/feast-dev/feast/pull/2091) ([olivierlabreche](https://github.com/olivierlabreche)) +- Use request.addfinalizer instead of the yield based approach in integ tests [\#2089](https://github.com/feast-dev/feast/pull/2089) ([achals](https://github.com/achals)) +- Odfv logic [\#2088](https://github.com/feast-dev/feast/pull/2088) ([felixwang9817](https://github.com/felixwang9817)) +- Refactor `_convert_arrow_to_proto` [\#2085](https://github.com/feast-dev/feast/pull/2085) ([judahrand](https://github.com/judahrand)) +- Add github run id into the integration test projects for debugging [\#2069](https://github.com/feast-dev/feast/pull/2069) ([achals](https://github.com/achals)) +- Fixing broken entity key link in quickstart [\#2068](https://github.com/feast-dev/feast/pull/2068) ([adchia](https://github.com/adchia)) +- Fix java\_release workflow by removing step without users/with [\#2067](https://github.com/feast-dev/feast/pull/2067) ([achals](https://github.com/achals)) +- Allow using cached registry when writing to the online store [\#2066](https://github.com/feast-dev/feast/pull/2066) ([achals](https://github.com/achals)) +- Raise import error when repo configs module cannot be imported [\#2065](https://github.com/feast-dev/feast/pull/2065) ([felixwang9817](https://github.com/felixwang9817)) +- Remove refs to tensorflow\_metadata [\#2063](https://github.com/feast-dev/feast/pull/2063) ([achals](https://github.com/achals)) +- Add detailed error messages for test\_univerisal\_e2e failures [\#2062](https://github.com/feast-dev/feast/pull/2062) ([achals](https://github.com/achals)) +- Remove unused protos & deprecated java modules [\#2061](https://github.com/feast-dev/feast/pull/2061) ([pyalex](https://github.com/pyalex)) +- Asynchronously refresh registry in transformation service [\#2060](https://github.com/feast-dev/feast/pull/2060) ([pyalex](https://github.com/pyalex)) +- Fix GH workflow for docker build of java parts [\#2059](https://github.com/feast-dev/feast/pull/2059) ([pyalex](https://github.com/pyalex)) +- 
Dedicated workflow for java PRs [\#2050](https://github.com/feast-dev/feast/pull/2050) ([pyalex](https://github.com/pyalex)) +- Run java integration test with real google cloud and aws [\#2049](https://github.com/feast-dev/feast/pull/2049) ([pyalex](https://github.com/pyalex)) +- Fixing typo enabling on\_demand\_transforms [\#2044](https://github.com/feast-dev/feast/pull/2044) ([ArrichM](https://github.com/ArrichM)) +- Make `feast registry-dump` print the whole registry as one json [\#2040](https://github.com/feast-dev/feast/pull/2040) ([nossrannug](https://github.com/nossrannug)) +- Remove tensorflow-metadata folders [\#2038](https://github.com/feast-dev/feast/pull/2038) ([casassg](https://github.com/casassg)) +- Update CHANGELOG for Feast v0.15.1 [\#2034](https://github.com/feast-dev/feast/pull/2034) ([felixwang9817](https://github.com/felixwang9817)) +- Remove unsupported java parts [\#2029](https://github.com/feast-dev/feast/pull/2029) ([pyalex](https://github.com/pyalex)) +- Fix checked out branch for PR docker image build workflow [\#2018](https://github.com/feast-dev/feast/pull/2018) ([tsotnet](https://github.com/tsotnet)) +- Extend "feast in production" page with description of java feature server [\#2017](https://github.com/feast-dev/feast/pull/2017) ([pyalex](https://github.com/pyalex)) +- Remove duplicates in setup.py and run rudimentary verifications [\#2016](https://github.com/feast-dev/feast/pull/2016) ([achals](https://github.com/achals)) +- Upload feature server docker image to ECR on approved PRs [\#2014](https://github.com/feast-dev/feast/pull/2014) ([tsotnet](https://github.com/tsotnet)) +- GitBook: \[\#1\] Plugin standards documentation [\#2011](https://github.com/feast-dev/feast/pull/2011) ([felixwang9817](https://github.com/felixwang9817)) +- Add changelog for v0.15.0 [\#2006](https://github.com/feast-dev/feast/pull/2006) ([adchia](https://github.com/adchia)) +- Add integration tests for AWS Lambda feature server [\#2001](https://github.com/feast-dev/feast/pull/2001) ([tsotnet](https://github.com/tsotnet)) + +# [v0.15.1](https://github.com/feast-dev/feast/tree/v0.15.1) (2021-11-13) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.15.0...v0.15.1) + +### Bug Fixes + +- Add missing comma in setup.py [\#2031](https://github.com/feast-dev/feast/pull/2031) ([achals](https://github.com/achals)) +- Correct cleanup after usage e2e tests [\#2015](https://github.com/feast-dev/feast/pull/2015) ([pyalex](https://github.com/pyalex)) +- Change Environment timestamps to be in UTC [\#2007](https://github.com/feast-dev/feast/pull/2007) ([felixwang9817](https://github.com/felixwang9817)) +- ODFV UDFs should handle list types [\#2002](https://github.com/feast-dev/feast/pull/2002) ([Agent007](https://github.com/Agent007)) + +### Merged Pull Requests + +- Remove unsupported java parts [\#2029](https://github.com/feast-dev/feast/pull/2029) ([pyalex](https://github.com/pyalex)) +- Fix checked out branch for PR docker image build workflow [\#2018](https://github.com/feast-dev/feast/pull/2018) ([tsotnet](https://github.com/tsotnet)) +- Remove duplicates in setup.py and run rudimentary verifications [\#2016](https://github.com/feast-dev/feast/pull/2016) ([achals](https://github.com/achals)) +- Upload feature server docker image to ECR on approved PRs [\#2014](https://github.com/feast-dev/feast/pull/2014) ([tsotnet](https://github.com/tsotnet)) +- Add integration tests for AWS Lambda feature server [\#2001](https://github.com/feast-dev/feast/pull/2001) 
([tsotnet](https://github.com/tsotnet)) +- Moving Feast Java back into main repo under java/ package [\#1997](https://github.com/feast-dev/feast/pull/1997) ([adchia](https://github.com/adchia)) + +# [v0.15.0](https://github.com/feast-dev/feast/tree/v0.15.0) (2021-11-08) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.14.1...v0.15.0) + +### Features + +- Adding stream ingestion alpha documentation [\#2005](https://github.com/feast-dev/feast/pull/2005) ([adchia](https://github.com/adchia)) +- Feature transformation server docker image [\#1972](https://github.com/feast-dev/feast/pull/1972) ([felixwang9817](https://github.com/felixwang9817)) +- eventtime check before writing features, use pipelines, ttl [\#1961](https://github.com/feast-dev/feast/pull/1961) ([vas28r13](https://github.com/vas28r13)) +- Plugin repo universal tests [\#1946](https://github.com/feast-dev/feast/pull/1946) ([felixwang9817](https://github.com/felixwang9817)) +- direct data ingestion into Online store [\#1939](https://github.com/feast-dev/feast/pull/1939) ([vas28r13](https://github.com/vas28r13)) +- Add an interface for TransformationService and a basic implementation [\#1932](https://github.com/feast-dev/feast/pull/1932) ([achals](https://github.com/achals)) +- Allows registering of features in request data as RequestFeatureView. Refactors common logic into a BaseFeatureView class [\#1931](https://github.com/feast-dev/feast/pull/1931) ([adchia](https://github.com/adchia)) +- Add final\_output\_feature\_names in Query context to avoid SELECT \* EXCEPT [\#1911](https://github.com/feast-dev/feast/pull/1911) ([MattDelac](https://github.com/MattDelac)) +- Add Dockerfile for GCP CloudRun FeatureServer [\#1887](https://github.com/feast-dev/feast/pull/1887) ([judahrand](https://github.com/judahrand)) + +### Bug Fixes + +- feast=0.14.0 `query_generator()` unecessary used twice [\#1978](https://github.com/feast-dev/feast/issues/1978) +- get\_online\_features on demand transform bug fixes + local integration test mode [\#2004](https://github.com/feast-dev/feast/pull/2004) ([adchia](https://github.com/adchia)) +- Always pass full and partial feature names to ODFV [\#2003](https://github.com/feast-dev/feast/pull/2003) ([judahrand](https://github.com/judahrand)) +- Update bq\_to\_feast\_value\_type with BOOLEAN type as a legacy sql data type [\#1996](https://github.com/feast-dev/feast/pull/1996) ([mavysavydav](https://github.com/mavysavydav)) +- Fix bug where using some Pandas dtypes in the output of an ODFV fails [\#1994](https://github.com/feast-dev/feast/pull/1994) ([judahrand](https://github.com/judahrand)) +- Fix duplicate update infra [\#1990](https://github.com/feast-dev/feast/pull/1990) ([felixwang9817](https://github.com/felixwang9817)) +- Improve performance of \_convert\_arrow\_to\_proto [\#1984](https://github.com/feast-dev/feast/pull/1984) ([nossrannug](https://github.com/nossrannug)) +- Fix duplicate upload entity [\#1981](https://github.com/feast-dev/feast/pull/1981) ([achals](https://github.com/achals)) +- fix redis cluster materialization [\#1968](https://github.com/feast-dev/feast/pull/1968) ([qooba](https://github.com/qooba)) +- Allow plugin repos to actually overwrite repo configs [\#1966](https://github.com/feast-dev/feast/pull/1966) ([felixwang9817](https://github.com/felixwang9817)) +- Delete keys from Redis when tearing down online store [\#1965](https://github.com/feast-dev/feast/pull/1965) ([achals](https://github.com/achals)) +- Fix issues with lint test and upgrade pip version 
[\#1964](https://github.com/feast-dev/feast/pull/1964) ([felixwang9817](https://github.com/felixwang9817)) +- Move IntegrationTestRepoConfig class to another module [\#1962](https://github.com/feast-dev/feast/pull/1962) ([felixwang9817](https://github.com/felixwang9817)) +- Solve package conflict in \[gcp\] and \[ci\] [\#1955](https://github.com/feast-dev/feast/pull/1955) ([ysk24ok](https://github.com/ysk24ok)) +- Remove some paths from unit test cache [\#1944](https://github.com/feast-dev/feast/pull/1944) ([achals](https://github.com/achals)) +- Fix bug in feast alpha enable CLI command [\#1940](https://github.com/feast-dev/feast/pull/1940) ([felixwang9817](https://github.com/felixwang9817)) +- Fix conditional statements for if OnDemandFVs exist [\#1937](https://github.com/feast-dev/feast/pull/1937) ([codyjlin](https://github.com/codyjlin)) +- Fix \_\_getitem\_\_ return value for feature view and on-demand feature view [\#1936](https://github.com/feast-dev/feast/pull/1936) ([mavysavydav](https://github.com/mavysavydav)) +- Corrected setup.py BigQuery version that's needed for a contributor's merged PR 1844 [\#1934](https://github.com/feast-dev/feast/pull/1934) ([mavysavydav](https://github.com/mavysavydav)) + +### Merged Pull Requests + +- Fix protobuf version conflict in \[gcp\] and \[ci\] packages [\#1992](https://github.com/feast-dev/feast/pull/1992) ([ysk24ok](https://github.com/ysk24ok)) +- Improve aws lambda deployment \(logging, idempotency, etc\) [\#1985](https://github.com/feast-dev/feast/pull/1985) ([tsotnet](https://github.com/tsotnet)) +- Extend context for usage statistics collection & add latencies for performance analysis [\#1983](https://github.com/feast-dev/feast/pull/1983) ([pyalex](https://github.com/pyalex)) +- Update CHANGELOG for Feast v0.14.1 [\#1982](https://github.com/feast-dev/feast/pull/1982) ([felixwang9817](https://github.com/felixwang9817)) +- Document AWS Lambda permissions [\#1970](https://github.com/feast-dev/feast/pull/1970) ([tsotnet](https://github.com/tsotnet)) +- Update online store helper docstring [\#1957](https://github.com/feast-dev/feast/pull/1957) ([amommendes](https://github.com/amommendes)) +- Add public docs for entity aliasing [\#1951](https://github.com/feast-dev/feast/pull/1951) ([codyjlin](https://github.com/codyjlin)) +- Updating roadmap + hero image [\#1950](https://github.com/feast-dev/feast/pull/1950) ([adchia](https://github.com/adchia)) +- Add David and Matt as approvers as well [\#1943](https://github.com/feast-dev/feast/pull/1943) ([achals](https://github.com/achals)) +- Add David and Matt as reviewers, and add actions for issue/PR assignment [\#1942](https://github.com/feast-dev/feast/pull/1942) ([achals](https://github.com/achals)) +- Simplify BigQuery load jobs [\#1935](https://github.com/feast-dev/feast/pull/1935) ([judahrand](https://github.com/judahrand)) + +# [v0.14.1](https://github.com/feast-dev/feast/tree/v0.14.1) (2021-10-28) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.14.0...v0.14.1) + +### Bug Fixes + +- Fix duplicate upload entity [\#1981](https://github.com/feast-dev/feast/pull/1981) ([achals](https://github.com/achals)) +- Fix bug in feast alpha enable CLI command [\#1940](https://github.com/feast-dev/feast/pull/1940) ([felixwang9817](https://github.com/felixwang9817)) +- Fix conditional statements for if OnDemandFVs exist [\#1937](https://github.com/feast-dev/feast/pull/1937) ([codyjlin](https://github.com/codyjlin)) +- Fix \_\_getitem\_\_ return value for feature view and on-demand feature 
view [\#1936](https://github.com/feast-dev/feast/pull/1936) ([mavysavydav](https://github.com/mavysavydav)) +- Corrected setup.py BigQuery version that's needed for a contributor's merged PR 1844 [\#1934](https://github.com/feast-dev/feast/pull/1934) ([mavysavydav](https://github.com/mavysavydav)) + +### Merged Pull Requests + +- Updating roadmap + hero image [\#1950](https://github.com/feast-dev/feast/pull/1950) ([adchia](https://github.com/adchia)) +- Simplify BigQuery load jobs [\#1935](https://github.com/feast-dev/feast/pull/1935) ([judahrand](https://github.com/judahrand)) + +# [v0.14.0](https://github.com/feast-dev/feast/tree/v0.14.0) (2021-10-08) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.13.0...v0.14.0) + +### Features + +- Changed FVProjection 'name\_to\_use' field to 'name\_alias' and changed '.set\_projection' in FeatureView to ".with\_projection". Also adjustments for some edge cases [\#1929](https://github.com/feast-dev/feast/pull/1929) ([mavysavydav](https://github.com/mavysavydav)) +- Make serverless alpha feature [\#1928](https://github.com/feast-dev/feast/pull/1928) ([felixwang9817](https://github.com/felixwang9817)) +- Feast endpoint [\#1927](https://github.com/feast-dev/feast/pull/1927) ([felixwang9817](https://github.com/felixwang9817)) +- Add location to BigQueryOfflineStoreConfig [\#1921](https://github.com/feast-dev/feast/pull/1921) ([loftiskg](https://github.com/loftiskg)) +- Create & teardown Lambda & API Gateway resources for serverless feature server [\#1900](https://github.com/feast-dev/feast/pull/1900) ([tsotnet](https://github.com/tsotnet)) +- Hide FeatureViewProjections from user interface & have FeatureViews carry FVProjections that carries the modified info of the FeatureView [\#1899](https://github.com/feast-dev/feast/pull/1899) ([mavysavydav](https://github.com/mavysavydav)) +- Upload docker image to ECR during feast apply [\#1877](https://github.com/feast-dev/feast/pull/1877) ([felixwang9817](https://github.com/felixwang9817)) +- Added .with\_name method in FeatureView/OnDemandFeatureView classes for name aliasing. 
FeatureViewProjection will hold this information [\#1872](https://github.com/feast-dev/feast/pull/1872) ([mavysavydav](https://github.com/mavysavydav)) + +### Bug Fixes + +- Update makefile to use pip installed dependencies [\#1920](https://github.com/feast-dev/feast/pull/1920) ([loftiskg](https://github.com/loftiskg)) +- Delete tables [\#1916](https://github.com/feast-dev/feast/pull/1916) ([felixwang9817](https://github.com/felixwang9817)) +- Set a 5 minute limit for redshift statement execution [\#1915](https://github.com/feast-dev/feast/pull/1915) ([achals](https://github.com/achals)) +- Use set when parsing repos to prevent duplicates [\#1913](https://github.com/feast-dev/feast/pull/1913) ([achals](https://github.com/achals)) +- resolve environment variables in repo config [\#1909](https://github.com/feast-dev/feast/pull/1909) ([samuel100](https://github.com/samuel100)) +- Respect specified ValueTypes for features during materialization [\#1906](https://github.com/feast-dev/feast/pull/1906) ([Agent007](https://github.com/Agent007)) +- Fix issue with feature views being detected as duplicated when imported [\#1905](https://github.com/feast-dev/feast/pull/1905) ([achals](https://github.com/achals)) +- Use contextvars to maintain a call stack during the usage calls [\#1882](https://github.com/feast-dev/feast/pull/1882) ([achals](https://github.com/achals)) + +### Merged Pull Requests + +- Update concepts/README.md [\#1926](https://github.com/feast-dev/feast/pull/1926) ([ysk24ok](https://github.com/ysk24ok)) +- Add CI for feature server Docker image [\#1925](https://github.com/feast-dev/feast/pull/1925) ([felixwang9817](https://github.com/felixwang9817)) +- cache provider in feature store instance [\#1924](https://github.com/feast-dev/feast/pull/1924) ([DvirDukhan](https://github.com/DvirDukhan)) +- Refactor logging and error messages in serverless [\#1923](https://github.com/feast-dev/feast/pull/1923) ([felixwang9817](https://github.com/felixwang9817)) +- Add a caching step to our github actions [\#1919](https://github.com/feast-dev/feast/pull/1919) ([achals](https://github.com/achals)) +- Add provider, offline store, online store, registry to RTD [\#1918](https://github.com/feast-dev/feast/pull/1918) ([felixwang9817](https://github.com/felixwang9817)) +- Cleanup tests [\#1901](https://github.com/feast-dev/feast/pull/1901) ([felixwang9817](https://github.com/felixwang9817)) + +# [v0.13.0](https://github.com/feast-dev/feast/tree/v0.13.0) (2021-09-22) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.12.1...v0.13.0) + +### Breaking Changes + +- Enforce case-insensitively unique feature view names [\#1835](https://github.com/feast-dev/feast/pull/1835) ([codyjlin](https://github.com/codyjlin)) +- Add init to Provider contract [\#1796](https://github.com/feast-dev/feast/pull/1796) ([woop](https://github.com/woop)) + +### Features + +- Add on demand feature view experimental docs [\#1880](https://github.com/feast-dev/feast/pull/1880) ([adchia](https://github.com/adchia)) +- Adding telemetry for on demand feature views and making existing usage calls async [\#1873](https://github.com/feast-dev/feast/pull/1873) ([adchia](https://github.com/adchia)) +- Read registry & config from env variables in AWS Lambda feature server [\#1870](https://github.com/feast-dev/feast/pull/1870) ([tsotnet](https://github.com/tsotnet)) +- Add feature server configuration for AWS lambda [\#1865](https://github.com/feast-dev/feast/pull/1865) ([felixwang9817](https://github.com/felixwang9817)) +- Add MVP 
support for on demand transforms for AWS to\_s3 and to\_redshift [\#1856](https://github.com/feast-dev/feast/pull/1856) ([adchia](https://github.com/adchia)) +- Add MVP support for on demand transforms for bigquery [\#1855](https://github.com/feast-dev/feast/pull/1855) ([adchia](https://github.com/adchia)) +- Add arrow support for on demand feature views [\#1853](https://github.com/feast-dev/feast/pull/1853) ([adchia](https://github.com/adchia)) +- Support adding request data in on demand transforms [\#1851](https://github.com/feast-dev/feast/pull/1851) ([adchia](https://github.com/adchia)) +- Support on demand feature views in feature services [\#1849](https://github.com/feast-dev/feast/pull/1849) ([achals](https://github.com/achals)) +- Infer features for on demand feature views, support multiple output features [\#1845](https://github.com/feast-dev/feast/pull/1845) ([achals](https://github.com/achals)) +- Add Registry and CLI operations for on demand feature views [\#1828](https://github.com/feast-dev/feast/pull/1828) ([achals](https://github.com/achals)) +- Implementing initial on demand transforms for historical retrieval to\_df [\#1824](https://github.com/feast-dev/feast/pull/1824) ([adchia](https://github.com/adchia)) +- Registry store plugin [\#1812](https://github.com/feast-dev/feast/pull/1812) ([DvirDukhan](https://github.com/DvirDukhan)) +- Enable entityless featureviews [\#1804](https://github.com/feast-dev/feast/pull/1804) ([codyjlin](https://github.com/codyjlin)) +- Initial scaffolding for on demand feature view [\#1803](https://github.com/feast-dev/feast/pull/1803) ([adchia](https://github.com/adchia)) \(see the sketch after this list\) +- Add s3 support \(with custom endpoints\) [\#1789](https://github.com/feast-dev/feast/pull/1789) ([woop](https://github.com/woop)) +- Local feature server implementation \(HTTP endpoint\) [\#1780](https://github.com/feast-dev/feast/pull/1780) ([tsotnet](https://github.com/tsotnet))
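For readers skimming the on-demand feature view entries above, a rough sketch of what the alpha-era API looked like in practice may help. Everything below is an approximation: the import paths, the `inputs=`/`features=` decorator arguments, and the `driver_stats` view are assumptions about this period of the SDK, not text from the release notes.

```python
from datetime import timedelta

import pandas as pd

from feast import Feature, FeatureView, FileSource, ValueType
from feast.on_demand_feature_view import on_demand_feature_view  # assumed alpha-era path

# An ordinary feature view to transform; the parquet path is illustrative.
driver_stats = FeatureView(
    name="driver_stats",
    entities=["driver_id"],
    ttl=timedelta(days=1),
    batch_source=FileSource(
        path="data/driver_stats.parquet",
        event_timestamp_column="event_timestamp",
    ),
)

# The decorated function runs on demand, at retrieval time, over the input rows.
@on_demand_feature_view(
    inputs={"driver_stats": driver_stats},
    features=[Feature(name="conv_rate_adjusted", dtype=ValueType.DOUBLE)],
)
def conv_rate_adjusted(inputs: pd.DataFrame) -> pd.DataFrame:
    out = pd.DataFrame()
    out["conv_rate_adjusted"] = inputs["conv_rate"] * 1.1
    return out
```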
+ +### Bug Fixes + +- Fixing odfv cli group description [\#1890](https://github.com/feast-dev/feast/pull/1890) ([adchia](https://github.com/adchia)) +- Fix list feature format for BigQuery offline datasources. [\#1889](https://github.com/feast-dev/feast/pull/1889) ([judahrand](https://github.com/judahrand)) +- Add `dill` to main dependencies [\#1886](https://github.com/feast-dev/feast/pull/1886) ([judahrand](https://github.com/judahrand)) +- Fix pytest\_collection\_modifyitems to select benchmark tests only [\#1874](https://github.com/feast-dev/feast/pull/1874) ([achals](https://github.com/achals)) +- Add support for multiple entities in Redshift [\#1850](https://github.com/feast-dev/feast/pull/1850) ([felixwang9817](https://github.com/felixwang9817)) +- Move apply\(dummy\_entity\) to apply time to ensure it persists in FeatureStore [\#1848](https://github.com/feast-dev/feast/pull/1848) ([codyjlin](https://github.com/codyjlin)) +- Add schema parameter to RedshiftSource [\#1847](https://github.com/feast-dev/feast/pull/1847) ([felixwang9817](https://github.com/felixwang9817)) +- Pass bigquery job object to get\_job [\#1844](https://github.com/feast-dev/feast/pull/1844) ([LarsKlingen](https://github.com/LarsKlingen)) +- Simplify \_python\_value\_to\_proto\_value by looking up values in a dict [\#1837](https://github.com/feast-dev/feast/pull/1837) ([achals](https://github.com/achals)) +- Update historical retrieval integration test for on demand feature views [\#1836](https://github.com/feast-dev/feast/pull/1836) ([achals](https://github.com/achals)) +- Fix flaky connection to redshift data API [\#1834](https://github.com/feast-dev/feast/pull/1834) ([achals](https://github.com/achals)) +- Init registry during create\_test\_environment [\#1829](https://github.com/feast-dev/feast/pull/1829) ([achals](https://github.com/achals)) +- Randomly generating new BQ dataset for offline\_online\_store\_consistency test [\#1818](https://github.com/feast-dev/feast/pull/1818) ([adchia](https://github.com/adchia)) +- Ensure docstring tests always teardown [\#1817](https://github.com/feast-dev/feast/pull/1817) ([felixwang9817](https://github.com/felixwang9817)) +- Use get\_multi instead of get for datastore reads [\#1814](https://github.com/feast-dev/feast/pull/1814) ([achals](https://github.com/achals)) +- Fix Redshift query for external tables [\#1810](https://github.com/feast-dev/feast/pull/1810) ([woop](https://github.com/woop)) +- Use a random dataset and table name for simple\_bq\_source [\#1801](https://github.com/feast-dev/feast/pull/1801) ([achals](https://github.com/achals)) +- Refactor Environment class and DataSourceCreator API, and use fixtures for datasets and data sources [\#1790](https://github.com/feast-dev/feast/pull/1790) ([achals](https://github.com/achals)) +- Fix get\_online\_features telemetry to only log every 10000 times [\#1786](https://github.com/feast-dev/feast/pull/1786) ([felixwang9817](https://github.com/felixwang9817)) +- Add a description field to the Feature Service class and proto [\#1771](https://github.com/feast-dev/feast/pull/1771) ([achals](https://github.com/achals)) +- Validate project name upon feast.apply [\#1766](https://github.com/feast-dev/feast/pull/1766) ([tedhtchang](https://github.com/tedhtchang)) +- Fix BQ historical retrieval with rows that got backfilled [\#1744](https://github.com/feast-dev/feast/pull/1744) ([MattDelac](https://github.com/MattDelac)) +- Handle case where `_LIST` type is empty [\#1703](https://github.com/feast-dev/feast/pull/1703) ([judahrand](https://github.com/judahrand))
+ +### Merged Pull Requests + +- Add `ValueType.NULL` [\#1893](https://github.com/feast-dev/feast/pull/1893) ([judahrand](https://github.com/judahrand)) +- Adding more details to the CONTRIBUTING.md [\#1888](https://github.com/feast-dev/feast/pull/1888) ([adchia](https://github.com/adchia)) +- Parse BQ `DATETIME` and `TIMESTAMP` [\#1885](https://github.com/feast-dev/feast/pull/1885) ([judahrand](https://github.com/judahrand)) +- Add durations to list the slowest tests [\#1881](https://github.com/feast-dev/feast/pull/1881) ([achals](https://github.com/achals)) +- Upload benchmark information to S3 after integration test runs [\#1878](https://github.com/feast-dev/feast/pull/1878) ([achals](https://github.com/achals)) +- Refactor providers to remove duplicate implementations [\#1876](https://github.com/feast-dev/feast/pull/1876) ([achals](https://github.com/achals)) +- Add Felix & Danny to code owners file [\#1869](https://github.com/feast-dev/feast/pull/1869) ([tsotnet](https://github.com/tsotnet)) +- Initial docker image for aws lambda feature server [\#1866](https://github.com/feast-dev/feast/pull/1866) ([tsotnet](https://github.com/tsotnet)) +- Add flags file to include experimental flags and test/usage flags [\#1864](https://github.com/feast-dev/feast/pull/1864) ([adchia](https://github.com/adchia)) +- Hook up pytest-benchmark to online retrieval [\#1858](https://github.com/feast-dev/feast/pull/1858) ([achals](https://github.com/achals)) \(see the sketch after this list\) +- Add feature server docs & small changes in local server [\#1852](https://github.com/feast-dev/feast/pull/1852) ([tsotnet](https://github.com/tsotnet)) +- Add roadmap to README.md [\#1843](https://github.com/feast-dev/feast/pull/1843) ([woop](https://github.com/woop)) +- Enable the types test to run on all compatible environments [\#1840](https://github.com/feast-dev/feast/pull/1840) ([adchia](https://github.com/adchia)) +- Update reviewers/approvers to include Danny/Felix [\#1833](https://github.com/feast-dev/feast/pull/1833) ([adchia](https://github.com/adchia)) +- Fix wrong links in README [\#1832](https://github.com/feast-dev/feast/pull/1832) ([baineng](https://github.com/baineng)) +- Remove older offline/online consistency tests [\#1831](https://github.com/feast-dev/feast/pull/1831) ([achals](https://github.com/achals)) +- Replace individual cli tests with parametrized tests [\#1830](https://github.com/feast-dev/feast/pull/1830) ([achals](https://github.com/achals)) +- Reducing wait interval for BQ integration tests [\#1827](https://github.com/feast-dev/feast/pull/1827) ([adchia](https://github.com/adchia)) +- Reducing size of universal repo to decrease integration test time [\#1826](https://github.com/feast-dev/feast/pull/1826) ([adchia](https://github.com/adchia)) +- Refactor the datastore online\_read method to be slightly more efficient [\#1819](https://github.com/feast-dev/feast/pull/1819) ([achals](https://github.com/achals)) +- Remove old doc [\#1815](https://github.com/feast-dev/feast/pull/1815) ([achals](https://github.com/achals)) +- Rename telemetry to usage [\#1800](https://github.com/feast-dev/feast/pull/1800) ([felixwang9817](https://github.com/felixwang9817)) +- Updating quickstart colab to explain more concepts and highlight value prop of Feast [\#1799](https://github.com/feast-dev/feast/pull/1799) ([adchia](https://github.com/adchia)) +- Fix Azure Terraform installation. [\#1793](https://github.com/feast-dev/feast/pull/1793) ([mmurdoch](https://github.com/mmurdoch)) +- Disable integration test reruns to identify flaky tests [\#1787](https://github.com/feast-dev/feast/pull/1787) ([achals](https://github.com/achals)) +- Rerun failed python integration tests [\#1785](https://github.com/feast-dev/feast/pull/1785) ([achals](https://github.com/achals)) +- Add Redis to the universal integration tests [\#1784](https://github.com/feast-dev/feast/pull/1784) ([achals](https://github.com/achals)) +- Add online feature retrieval integration test using the universal repo [\#1783](https://github.com/feast-dev/feast/pull/1783) ([achals](https://github.com/achals)) +- Fix wrong description in README.md [\#1779](https://github.com/feast-dev/feast/pull/1779) ([WingCode](https://github.com/WingCode)) +- Clean up docstring tests [\#1778](https://github.com/feast-dev/feast/pull/1778) ([felixwang9817](https://github.com/felixwang9817)) +- Add offline retrieval integration tests using the universal repo [\#1769](https://github.com/feast-dev/feast/pull/1769) ([achals](https://github.com/achals)) +- Adding initial type support related tests for BQ [\#1768](https://github.com/feast-dev/feast/pull/1768) ([adchia](https://github.com/adchia)) +- Add release-patch script [\#1554](https://github.com/feast-dev/feast/pull/1554) ([jklegar](https://github.com/jklegar))
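Since \#1858 above wires pytest-benchmark into the online-retrieval tests, a minimal sketch of the pattern may be useful; the fetch function below is a stub standing in for a real feature-store read, not Feast code.

```python
import time

def fetch_online_features():
    # Stub for an online read; a real benchmark would call the feature store,
    # e.g. something like store.get_online_features(...), instead of sleeping.
    time.sleep(0.001)
    return {"driver_id": 1001, "trips_today": 5}

def test_online_read(benchmark):
    # pytest-benchmark injects the `benchmark` fixture, which calls the target
    # repeatedly and reports timing statistics (min, mean, stddev, rounds).
    result = benchmark(fetch_online_features)
    assert result["trips_today"] == 5
```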
+ +# [v0.12.1](https://github.com/feast-dev/feast/tree/v0.12.1) (2021-08-20) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.12.0...v0.12.1) + +### Bug Fixes + +- Fix get\_online\_features telemetry to only log every 10000 times [\#1786](https://github.com/feast-dev/feast/pull/1786) ([felixwang9817](https://github.com/felixwang9817)) + +# [v0.12.0](https://github.com/feast-dev/feast/tree/v0.12.0) (2021-08-05) + +[Full Changelog](https://github.com/feast-dev/feast/compare/v0.11.0...v0.12.0) + +### Breaking Changes + +- Set default feature naming to not include feature view name. Add option to include feature view name in feature naming.
[\#1641](https://github.com/feast-dev/feast/pull/1641) ([Mwad22](https://github.com/Mwad22)) + +### Features + +- AWS Template improvements \(input prompt for configs, default to Redshift\) [\#1731](https://github.com/feast-dev/feast/pull/1731) ([tsotnet](https://github.com/tsotnet)) +- Clean up uploaded entities in Redshift offline store [\#1730](https://github.com/feast-dev/feast/pull/1730) ([tsotnet](https://github.com/tsotnet)) +- Implement Redshift historical retrieval [\#1720](https://github.com/feast-dev/feast/pull/1720) ([tsotnet](https://github.com/tsotnet)) +- Add custom data sources [\#1713](https://github.com/feast-dev/feast/pull/1713) ([achals](https://github.com/achals)) +- Added --skip-source-validation flag to feast apply [\#1702](https://github.com/feast-dev/feast/pull/1702) ([mavysavydav](https://github.com/mavysavydav)) +- Allow specifying FeatureServices in FeatureStore methods [\#1691](https://github.com/feast-dev/feast/pull/1691) ([achals](https://github.com/achals)) +- Implement materialization for RedshiftOfflineStore & RedshiftRetrievalJob [\#1680](https://github.com/feast-dev/feast/pull/1680) ([tsotnet](https://github.com/tsotnet)) +- Add FeatureService proto definition [\#1676](https://github.com/feast-dev/feast/pull/1676) ([achals](https://github.com/achals)) +- Add RedshiftDataSource [\#1669](https://github.com/feast-dev/feast/pull/1669) ([tsotnet](https://github.com/tsotnet)) +- Add streaming sources to the FeatureView API [\#1664](https://github.com/feast-dev/feast/pull/1664) ([achals](https://github.com/achals)) +- Add to\_table\(\) to RetrievalJob object [\#1663](https://github.com/feast-dev/feast/pull/1663) ([MattDelac](https://github.com/MattDelac)) +- Provide the user with more options for setting the to\_bigquery config [\#1661](https://github.com/feast-dev/feast/pull/1661) ([codyjlin](https://github.com/codyjlin)) + +### Bug Fixes + +- Fix `feast apply` bugs [\#1754](https://github.com/feast-dev/feast/pull/1754) ([tsotnet](https://github.com/tsotnet)) +- Teardown integration tests resources for aws [\#1740](https://github.com/feast-dev/feast/pull/1740) ([achals](https://github.com/achals)) +- Fix GCS version [\#1732](https://github.com/feast-dev/feast/pull/1732) ([potatochip](https://github.com/potatochip)) +- Fix unit test warnings related to file\_url [\#1726](https://github.com/feast-dev/feast/pull/1726) ([tedhtchang](https://github.com/tedhtchang)) +- Refactor data source classes to fix import issues [\#1723](https://github.com/feast-dev/feast/pull/1723) ([achals](https://github.com/achals)) +- Append ns time and random integer to redshift test tables [\#1716](https://github.com/feast-dev/feast/pull/1716) ([achals](https://github.com/achals)) +- Add randomness to bigquery table name [\#1711](https://github.com/feast-dev/feast/pull/1711) ([felixwang9817](https://github.com/felixwang9817)) +- Fix dry\_run bug that was making to\_bigquery hang indefinitely [\#1706](https://github.com/feast-dev/feast/pull/1706) ([codyjlin](https://github.com/codyjlin)) +- Stringify WhichOneof to make mypy happy [\#1705](https://github.com/feast-dev/feast/pull/1705) ([achals](https://github.com/achals)) +- Update redis options parsing [\#1704](https://github.com/feast-dev/feast/pull/1704) ([DvirDukhan](https://github.com/DvirDukhan)) +- Cancel BigQuery job if block\_until\_done call times out or is interrupted [\#1699](https://github.com/feast-dev/feast/pull/1699) ([codyjlin](https://github.com/codyjlin)) +- Teardown infrastructure after integration tests 
[\#1697](https://github.com/feast-dev/feast/pull/1697) ([achals](https://github.com/achals)) +- Fix unit tests that got broken by Pandas 1.3.0 release [\#1683](https://github.com/feast-dev/feast/pull/1683) ([tsotnet](https://github.com/tsotnet)) +- Remove default list from the FeatureView constructor [\#1679](https://github.com/feast-dev/feast/pull/1679) ([achals](https://github.com/achals)) +- BQ exception should be raised first before we check the timeout [\#1675](https://github.com/feast-dev/feast/pull/1675) ([MattDelac](https://github.com/MattDelac)) +- Allow strings for online/offline store instead of dicts [\#1673](https://github.com/feast-dev/feast/pull/1673) ([achals](https://github.com/achals)) +- Cancel BigQuery job if timeout hits [\#1672](https://github.com/feast-dev/feast/pull/1672) ([MattDelac](https://github.com/MattDelac)) +- Make sure FeatureViews with same name can not be applied at the same … [\#1651](https://github.com/feast-dev/feast/pull/1651) ([tedhtchang](https://github.com/tedhtchang)) + +### Merged Pull Requests + +- Add AWS docs in summary.md [\#1761](https://github.com/feast-dev/feast/pull/1761) ([tsotnet](https://github.com/tsotnet)) +- Document permissions for AWS \(DynamoDB & Redshift\) [\#1753](https://github.com/feast-dev/feast/pull/1753) ([tsotnet](https://github.com/tsotnet)) +- Adding small note for project naming convention [\#1752](https://github.com/feast-dev/feast/pull/1752) ([codyjlin](https://github.com/codyjlin)) +- Fix warning in FeatureView.from\_proto [\#1751](https://github.com/feast-dev/feast/pull/1751) ([tsotnet](https://github.com/tsotnet)) +- Add Feature Service to the concepts group [\#1750](https://github.com/feast-dev/feast/pull/1750) ([achals](https://github.com/achals)) +- Docstring tests [\#1749](https://github.com/feast-dev/feast/pull/1749) ([felixwang9817](https://github.com/felixwang9817)) +- Document how pandas deals with missing values [\#1748](https://github.com/feast-dev/feast/pull/1748) ([achals](https://github.com/achals)) +- Restore feature refs [\#1746](https://github.com/feast-dev/feast/pull/1746) ([felixwang9817](https://github.com/felixwang9817)) +- Updating CLI apply to use FeatureStore [\#1745](https://github.com/feast-dev/feast/pull/1745) ([adchia](https://github.com/adchia)) +- Delete old code [\#1743](https://github.com/feast-dev/feast/pull/1743) ([felixwang9817](https://github.com/felixwang9817)) +- Bump dependency on pyyaml [\#1742](https://github.com/feast-dev/feast/pull/1742) ([achals](https://github.com/achals)) +- Docstrings [\#1739](https://github.com/feast-dev/feast/pull/1739) ([felixwang9817](https://github.com/felixwang9817)) +- Add the foundation of the universal feature repo and a test that uses it [\#1734](https://github.com/feast-dev/feast/pull/1734) ([achals](https://github.com/achals)) +- Add AWS documentation \(DynamoDB, Redshift\) [\#1733](https://github.com/feast-dev/feast/pull/1733) ([tsotnet](https://github.com/tsotnet)) +- Change internal references from input to batch\_source [\#1729](https://github.com/feast-dev/feast/pull/1729) ([felixwang9817](https://github.com/felixwang9817)) +- Refactor tests into new directory layout [\#1725](https://github.com/feast-dev/feast/pull/1725) ([achals](https://github.com/achals)) +- Registry teardown [\#1718](https://github.com/feast-dev/feast/pull/1718) ([felixwang9817](https://github.com/felixwang9817)) +- Redirect telemetry to usage [\#1717](https://github.com/feast-dev/feast/pull/1717) ([felixwang9817](https://github.com/felixwang9817)) +- Link to
offline and online store specs in docs summary [\#1715](https://github.com/feast-dev/feast/pull/1715) ([achals](https://github.com/achals)) +- Avoid skewed join between entity\_df & feature views [\#1712](https://github.com/feast-dev/feast/pull/1712) ([MattDelac](https://github.com/MattDelac)) +- Remove type comments [\#1710](https://github.com/feast-dev/feast/pull/1710) ([achals](https://github.com/achals)) +- Increase efficiency of Registry updates [\#1698](https://github.com/feast-dev/feast/pull/1698) ([felixwang9817](https://github.com/felixwang9817)) +- Parallelize integration tests [\#1684](https://github.com/feast-dev/feast/pull/1684) ([tsotnet](https://github.com/tsotnet)) +- Remove debug logging [\#1678](https://github.com/feast-dev/feast/pull/1678) ([charliec443](https://github.com/charliec443)) +- Docs: Fix Feature References example [\#1674](https://github.com/feast-dev/feast/pull/1674) ([GregKuhlmann](https://github.com/GregKuhlmann)) +- Rename to\_table to to\_arrow [\#1671](https://github.com/feast-dev/feast/pull/1671) ([MattDelac](https://github.com/MattDelac)) +- Small reference documentation update [\#1668](https://github.com/feast-dev/feast/pull/1668) ([nels](https://github.com/nels)) +- Grouped inferencing statements together in apply methods for easier readability [\#1667](https://github.com/feast-dev/feast/pull/1667) ([mavysavydav](https://github.com/mavysavydav)) +- Infer min and max timestamps from entity\_df to limit data read from BQ source [\#1665](https://github.com/feast-dev/feast/pull/1665) ([Mwad22](https://github.com/Mwad22)) +- Rename telemetry to usage [\#1660](https://github.com/feast-dev/feast/pull/1660) ([tsotnet](https://github.com/tsotnet)) +- Update charts README [\#1659](https://github.com/feast-dev/feast/pull/1659) ([szalai1](https://github.com/szalai1)) + +# [v0.11.0](https://github.com/feast-dev/feast/tree/v0.11.0) (2021-06-24) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.8...v0.11.0) -**Implemented enhancements:** +### Features - Allow BigQuery project to be configured [\#1656](https://github.com/feast-dev/feast/pull/1656) ([MattDelac](https://github.com/MattDelac)) - Add to_bigquery function to BigQueryRetrievalJob [\#1634](https://github.com/feast-dev/feast/pull/1634) ([vtao2](https://github.com/vtao2)) @@ -19,7 +716,7 @@ - Add support for Redis and Redis Cluster [\#1511](https://github.com/feast-dev/feast/pull/1511) ([qooba](https://github.com/qooba)) - Add path option to cli [\#1509](https://github.com/feast-dev/feast/pull/1509) ([tedhtchang](https://github.com/tedhtchang)) -**Fixed bugs:** +### Bug Fixes - Schema Inferencing should happen at apply time [\#1646](https://github.com/feast-dev/feast/pull/1646) ([mavysavydav](https://github.com/mavysavydav)) - Don't use .result\(\) in BigQueryOfflineStore, since it still leads to OOM [\#1642](https://github.com/feast-dev/feast/pull/1642) ([tsotnet](https://github.com/tsotnet)) @@ -40,7 +737,7 @@ - Validate project and repo names for apply and init commands [\#1558](https://github.com/feast-dev/feast/pull/1558) ([tedhtchang](https://github.com/tedhtchang)) - Bump supported Python version to 3.7 [\#1504](https://github.com/feast-dev/feast/pull/1504) ([tsotnet](https://github.com/tsotnet)) -**Merged pull requests:** +### Merged Pull Requests - Rename telemetry to usage [\#1660](https://github.com/feast-dev/feast/pull/1660) ([tsotnet](https://github.com/tsotnet)) - Refactor OfflineStoreConfig classes into their owning modules 
[\#1657](https://github.com/feast-dev/feast/pull/1657) ([achals](https://github.com/achals)) @@ -78,74 +775,74 @@ - Add optional telemetry to other CLI commands [\#1505](https://github.com/feast-dev/feast/pull/1505) ([jklegar](https://github.com/jklegar)) -## [v0.10.8](https://github.com/feast-dev/feast/tree/v0.10.8) (2021-06-17) +# [v0.10.8](https://github.com/feast-dev/feast/tree/v0.10.8) (2021-06-17) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.7...v0.10.8) -**Implemented enhancements:** +### Features - Add `to_bigquery()` function to BigQueryRetrievalJob [\#1634](https://github.com/feast-dev/feast/pull/1634) ([vtao2](https://github.com/vtao2)) -**Fixed bugs:** +### Bug Fixes - Don't use .result\(\) in BigQueryOfflineStore, since it still leads to OOM [\#1642](https://github.com/feast-dev/feast/pull/1642) ([tsotnet](https://github.com/tsotnet)) - Don't load entire bigquery query results in memory [\#1638](https://github.com/feast-dev/feast/pull/1638) ([tsotnet](https://github.com/tsotnet)) - Add entity column validations when getting historical features from bigquery [\#1614](https://github.com/feast-dev/feast/pull/1614) ([achals](https://github.com/achals)) -**Merged pull requests:** +### Merged Pull Requests - Make test historical retrieval longer [\#1630](https://github.com/feast-dev/feast/pull/1630) ([MattDelac](https://github.com/MattDelac)) - Fix failing historical retrieval assertion [\#1622](https://github.com/feast-dev/feast/pull/1622) ([woop](https://github.com/woop)) - Optimize historical retrieval with BigQuery offline store [\#1602](https://github.com/feast-dev/feast/pull/1602) ([MattDelac](https://github.com/MattDelac)) -## [v0.10.7](https://github.com/feast-dev/feast/tree/v0.10.7) (2021-06-07) +# [v0.10.7](https://github.com/feast-dev/feast/tree/v0.10.7) (2021-06-07) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.6...v0.10.7) -**Fixed bugs:** +### Bug Fixes - Fix race condition in historical e2e tests [\#1620](https://github.com/feast-dev/feast/pull/1620) ([woop](https://github.com/woop)) -**Merged pull requests:** +### Merged Pull Requests - Use drop\_duplicates\(\) instead of groupby \(about 1.5~2x faster\) [\#1617](https://github.com/feast-dev/feast/pull/1617) ([rightx2](https://github.com/rightx2)) -- Use CONCAT\(\) instead of ROW\_NUMBER\(\) [\#1601](https://github.com/feast-dev/feast/pull/1601) ([MattDelac](https://github.com/MattDelac)) +- Use CONCAT\(\) instead of ROW\_NUMBER\(\) [\#1601](https://github.com/feast-dev/feast/pull/1601) ([MattDelac](https://github.com/MattDelac)) - Minor doc fix in the code snippet: Fix to reference the right instance for the retrieved job instance object [\#1599](https://github.com/feast-dev/feast/pull/1599) ([dmatrix](https://github.com/dmatrix)) - Append nanoseconds to dataset name in test\_historical\_retrival to prevent tests stomping over each other [\#1593](https://github.com/feast-dev/feast/pull/1593) ([achals](https://github.com/achals)) - Make start and end timestamps tz aware in the CLI [\#1590](https://github.com/feast-dev/feast/pull/1590) ([achals](https://github.com/achals)) -## [v0.10.6](https://github.com/feast-dev/feast/tree/v0.10.6) (2021-05-27) +# [v0.10.6](https://github.com/feast-dev/feast/tree/v0.10.6) (2021-05-27) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.5...v0.10.6) -**Implemented enhancements:** +### Features - Add datastore namespace option in configs [\#1581](https://github.com/feast-dev/feast/pull/1581) ([tsotnet](https://github.com/tsotnet)) 
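As an aside on the v0.10.7 entry above that swapped `groupby` for `drop_duplicates` \(\#1617\): the two pandas patterns below both keep the latest row per entity, which is what made the faster one a safe substitution. The frame is made up for illustration.

```python
import pandas as pd

df = pd.DataFrame(
    {
        "driver_id": [1, 1, 2, 2],
        "event_timestamp": pd.to_datetime(
            ["2021-06-01", "2021-06-02", "2021-06-01", "2021-06-03"]
        ),
        "trips_today": [3, 5, 7, 9],
    }
)

# groupby-based "latest row per driver" (the slower pattern that was replaced):
latest_groupby = df.loc[df.groupby("driver_id")["event_timestamp"].idxmax()]

# drop_duplicates-based equivalent (reported roughly 1.5-2x faster in #1617):
latest_dedup = df.sort_values("event_timestamp").drop_duplicates(
    "driver_id", keep="last"
)
```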
-**Fixed bugs:** +### Bug Fixes - Fix contention issue [\#1582](https://github.com/feast-dev/feast/pull/1582) ([woop](https://github.com/woop)) - Ensure that only None types fail predicate [\#1580](https://github.com/feast-dev/feast/pull/1580) ([woop](https://github.com/woop)) - Don't create bigquery dataset if it already exists [\#1569](https://github.com/feast-dev/feast/pull/1569) ([tsotnet](https://github.com/tsotnet)) -**Merged pull requests:** +### Merged Pull Requests - Change OfflineStore class description [\#1571](https://github.com/feast-dev/feast/pull/1571) ([tedhtchang](https://github.com/tedhtchang)) -## [v0.10.5](https://github.com/feast-dev/feast/tree/v0.10.5) (2021-05-19) +# [v0.10.5](https://github.com/feast-dev/feast/tree/v0.10.5) (2021-05-19) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.4...v0.10.5) -**Implemented enhancements:** +### Features - Add offline\_store config [\#1552](https://github.com/feast-dev/feast/pull/1552) ([tsotnet](https://github.com/tsotnet)) -**Fixed bugs:** +### Bug Fixes - Validate project and repo names for apply and init commands [\#1558](https://github.com/feast-dev/feast/pull/1558) ([tedhtchang](https://github.com/tedhtchang)) -**Merged pull requests:** +### Merged Pull Requests - Fix Sphinx documentation building [\#1563](https://github.com/feast-dev/feast/pull/1563) ([woop](https://github.com/woop)) - Add test coverage and remove MacOS integration tests [\#1562](https://github.com/feast-dev/feast/pull/1562) ([woop](https://github.com/woop)) @@ -154,17 +851,17 @@ - Add opt-out exception logging telemetry [\#1535](https://github.com/feast-dev/feast/pull/1535) ([jklegar](https://github.com/jklegar)) - Add instruction for installing Feast on IKS and OpenShift using Kustomize [\#1534](https://github.com/feast-dev/feast/pull/1534) ([tedhtchang](https://github.com/tedhtchang)) -## [v0.10.4](https://github.com/feast-dev/feast/tree/v0.10.4) (2021-05-12) +# [v0.10.4](https://github.com/feast-dev/feast/tree/v0.10.4) (2021-05-12) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.3...v0.10.4) -**Implemented enhancements:** +### Features - Inferencing of Features in FeatureView and timestamp column of DataSource [\#1523](https://github.com/feast-dev/feast/pull/1523) ([mavysavydav](https://github.com/mavysavydav)) - Add Unix Timestamp value type [\#1520](https://github.com/feast-dev/feast/pull/1520) ([MattDelac](https://github.com/MattDelac)) - Fix materialize for None [\#1481](https://github.com/feast-dev/feast/pull/1481) ([qooba](https://github.com/qooba)) -**Merged pull requests:** +### Merged Pull Requests - BigQuery type to Feast type conversion chart update [\#1530](https://github.com/feast-dev/feast/pull/1530) ([mavysavydav](https://github.com/mavysavydav)) - remove unnecessary path join in setup.py [\#1529](https://github.com/feast-dev/feast/pull/1529) ([shihabuddinbuet](https://github.com/shihabuddinbuet)) @@ -175,22 +872,22 @@ - Better logging for materialize command [\#1499](https://github.com/feast-dev/feast/pull/1499) ([jklegar](https://github.com/jklegar)) -## [v0.10.3](https://github.com/feast-dev/feast/tree/v0.10.3) (2021-04-21) +# [v0.10.3](https://github.com/feast-dev/feast/tree/v0.10.3) (2021-04-21) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.2...v0.10.3) -**Implemented enhancements:** +### Features - Add support for third party providers [\#1501](https://github.com/feast-dev/feast/pull/1501) ([tsotnet](https://github.com/tsotnet)) - Infer entity dataframe event timestamp column
[\#1495](https://github.com/feast-dev/feast/pull/1495) ([jklegar](https://github.com/jklegar)) - Allow Feast apply to import files recursively \(and add .feastignore\) [\#1482](https://github.com/feast-dev/feast/pull/1482) ([tsotnet](https://github.com/tsotnet)) -**Fixed bugs:** +### Bug Fixes - Bump supported Python version to 3.7 [\#1504](https://github.com/feast-dev/feast/pull/1504) ([tsotnet](https://github.com/tsotnet)) - Fix bug in allowing empty repositories to be applied to a GCS registry [\#1488](https://github.com/feast-dev/feast/pull/1488) ([woop](https://github.com/woop)) -**Merged pull requests:** +### Merged Pull Requests - Add a fixed timestamp to quickstart data [\#1513](https://github.com/feast-dev/feast/pull/1513) ([jklegar](https://github.com/jklegar)) - Make gcp imports optional [\#1512](https://github.com/feast-dev/feast/pull/1512) ([jklegar](https://github.com/jklegar)) @@ -201,23 +898,23 @@ - Update broken urls in contributing.md [\#1489](https://github.com/feast-dev/feast/pull/1489) ([tedhtchang](https://github.com/tedhtchang)) - Python docs formatting fixes [\#1473](https://github.com/feast-dev/feast/pull/1473) ([jklegar](https://github.com/jklegar)) -## [v0.10.2](https://github.com/feast-dev/feast/tree/v0.10.2) (2021-04-21) +# [v0.10.2](https://github.com/feast-dev/feast/tree/v0.10.2) (2021-04-21) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.1...v0.10.2) -**Fixed bugs:** +### Bug Fixes - Fix bug in allowing empty repositories to be applied to a GCS registry [\#1488](https://github.com/feast-dev/feast/pull/1488) ([woop](https://github.com/woop)) -## [v0.10.1](https://github.com/feast-dev/feast/tree/v0.10.1) (2021-04-21) +# [v0.10.1](https://github.com/feast-dev/feast/tree/v0.10.1) (2021-04-21) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.10.0...v0.10.1) -**Fixed bugs:** +### Bug Fixes - Fix time zone issue with get\_historical\_features [\#1475](https://github.com/feast-dev/feast/pull/1475) ([tsotnet](https://github.com/tsotnet)) -**Merged pull requests:** +### Merged Pull Requests - Improve exception handling, logging, and validation [\#1477](https://github.com/feast-dev/feast/pull/1477) ([woop](https://github.com/woop)) - Remove duped pic [\#1476](https://github.com/feast-dev/feast/pull/1476) ([YikSanChan](https://github.com/YikSanChan)) @@ -226,11 +923,11 @@ - Fix CLI entities command & add feature-views command [\#1471](https://github.com/feast-dev/feast/pull/1471) ([tsotnet](https://github.com/tsotnet)) -## [v0.10.0](https://github.com/feast-dev/feast/tree/0.10.0) (2021-04-15) +# [v0.10.0](https://github.com/feast-dev/feast/tree/0.10.0) (2021-04-15) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.9.5...v0.10.0) -** Implemented enhancements:** +### Features - Add template generation to Feast CLI for Google Cloud Platform [\#1460](https://github.com/feast-dev/feast/pull/1460) ([woop](https://github.com/woop)) - Add support for retrieving data from sources that don't match providers [\#1454](https://github.com/feast-dev/feast/pull/1454) ([woop](https://github.com/woop)) @@ -252,7 +949,7 @@ - FeatureStore, FeatureView, Config, and BigQuerySource classes for updated SDK [\#1364](https://github.com/feast-dev/feast/pull/1364) ([jklegar](https://github.com/jklegar)) - Add support for new deploy CLI [\#1362](https://github.com/feast-dev/feast/pull/1362) ([oavdeev](https://github.com/oavdeev)) -** Fixed bugs:** +### Bug Fixes - Fix time zone access with native python datetimes 
[\#1469](https://github.com/feast-dev/feast/pull/1469) ([tsotnet](https://github.com/tsotnet)) - Small fixes for created\_timestamp [\#1468](https://github.com/feast-dev/feast/pull/1468) ([jklegar](https://github.com/jklegar)) @@ -269,7 +966,7 @@ - Make CLI apply in local mode idempotent [\#1401](https://github.com/feast-dev/feast/pull/1401) ([oavdeev](https://github.com/oavdeev)) - Fix a bug in client archive\_project method and fix lint in grpc auth [\#1396](https://github.com/feast-dev/feast/pull/1396) ([randxie](https://github.com/randxie)) -**Merged pull requests:** +### Merged Pull Requests - Change GCP template names to match local template [\#1470](https://github.com/feast-dev/feast/pull/1470) ([jklegar](https://github.com/jklegar)) - Add logging to materialize [\#1467](https://github.com/feast-dev/feast/pull/1467) ([woop](https://github.com/woop)) @@ -316,25 +1013,25 @@ - Add Firestore online format specification [\#1367](https://github.com/feast-dev/feast/pull/1367) ([oavdeev](https://github.com/oavdeev)) - Improve documentation for k8s-spark resource template [\#1363](https://github.com/feast-dev/feast/pull/1363) ([theofpa](https://github.com/theofpa)) -## [v0.9.1](https://github.com/feast-dev/feast/tree/v0.9.1) (2021-01-29) +# [v0.9.1](https://github.com/feast-dev/feast/tree/v0.9.1) (2021-01-29) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.9.0...v0.9.1) -**Implemented enhancements:** +### Features - Add telemetry to Python SDK [\#1289](https://github.com/feast-dev/feast/pull/1289) ([jklegar](https://github.com/jklegar)) -**Fixed bugs:** +### Bug Fixes - Fix kafka download url [\#1298](https://github.com/feast-dev/feast/pull/1298) ([jklegar](https://github.com/jklegar)) - disable telemetry in docker-compose test and job\_service [\#1297](https://github.com/feast-dev/feast/pull/1297) ([jklegar](https://github.com/jklegar)) -## [v0.9.0](https://github.com/feast-dev/feast/tree/v0.9.0) (2021-01-28) +# [v0.9.0](https://github.com/feast-dev/feast/tree/v0.9.0) (2021-01-28) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.8.4...v0.9.0) -**Implemented enhancements:** +### Features - Enable user to provide spark job template as input for jobservice deployment [\#1285](https://github.com/feast-dev/feast/pull/1285) ([khorshuheng](https://github.com/khorshuheng)) - Add feature table name filter to jobs list api [\#1282](https://github.com/feast-dev/feast/pull/1282) ([terryyylim](https://github.com/terryyylim)) @@ -343,7 +1040,7 @@ - Azure example terraform [\#1274](https://github.com/feast-dev/feast/pull/1274) ([jklegar](https://github.com/jklegar)) -**Fixed bugs:** +### Bug Fixes - make EMR jar uploader work the same as k8s one [\#1284](https://github.com/feast-dev/feast/pull/1284) ([oavdeev](https://github.com/oavdeev)) - Don't error when azure vars not set [\#1277](https://github.com/feast-dev/feast/pull/1277) ([jklegar](https://github.com/jklegar)) @@ -354,7 +1051,7 @@ - Bump terraform rds module version [\#1204](https://github.com/feast-dev/feast/pull/1204) ([oavdeev](https://github.com/oavdeev)) -**Merged pull requests:** +### Merged Pull Requests - Use date partitioning column in FileSource [\#1293](https://github.com/feast-dev/feast/pull/1293) ([pyalex](https://github.com/pyalex)) - Add EMR CI/CD entrypoint script [\#1290](https://github.com/feast-dev/feast/pull/1290) ([oavdeev](https://github.com/oavdeev)) @@ -411,41 +1108,41 @@ - Remove unnecessary Google Auth dependency [\#1170](https://github.com/feast-dev/feast/pull/1170) 
([woop](https://github.com/woop)) -## [v0.8.2](https://github.com/feast-dev/feast/tree/v0.8.2) (2020-12-01) +# [v0.8.2](https://github.com/feast-dev/feast/tree/v0.8.2) (2020-12-01) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.8.1...v0.8.2) -**Implemented enhancements:** +### Features - Configurable materialization destination for view in BigQuerySource [\#1201](https://github.com/feast-dev/feast/pull/1201) ([pyalex](https://github.com/pyalex)) -**Fixed bugs:** +### Bug Fixes - Fix tag order for release workflow [\#1205](https://github.com/feast-dev/feast/pull/1205) ([terryyylim](https://github.com/terryyylim)) - Fix Feature Table not updated on new feature addition [\#1197](https://github.com/feast-dev/feast/pull/1197) ([khorshuheng](https://github.com/khorshuheng)) -**Merged pull requests:** +### Merged Pull Requests - Suppress kafka logs in Ingestion Job [\#1206](https://github.com/feast-dev/feast/pull/1206) ([pyalex](https://github.com/pyalex)) - Add project name to metrics labels in Ingestion Job [\#1202](https://github.com/feast-dev/feast/pull/1202) ([pyalex](https://github.com/pyalex)) -## [v0.8.1](https://github.com/feast-dev/feast/tree/v0.8.1) (2020-11-24) +# [v0.8.1](https://github.com/feast-dev/feast/tree/v0.8.1) (2020-11-24) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.8.0...v0.8.1) -**Implemented enhancements:** +### Features - Expires Redis Keys based on Feature Table Max Age [\#1161](https://github.com/feast-dev/feast/pull/1161) ([khorshuheng](https://github.com/khorshuheng)) - Jobservice control loop \(based on \#1140\) [\#1156](https://github.com/feast-dev/feast/pull/1156) ([oavdeev](https://github.com/oavdeev)) -**Fixed bugs:** +### Bug Fixes - Lazy metrics initialization \(to correct pick up in executor\) [\#1195](https://github.com/feast-dev/feast/pull/1195) ([pyalex](https://github.com/pyalex)) - Add missing third\_party folder [\#1185](https://github.com/feast-dev/feast/pull/1185) ([terryyylim](https://github.com/terryyylim)) - Fix missing name variable instantiation [\#1166](https://github.com/feast-dev/feast/pull/1166) ([terryyylim](https://github.com/terryyylim)) -**Merged pull requests:** +### Merged Pull Requests - Bump ssh-agent version [\#1175](https://github.com/feast-dev/feast/pull/1175) ([terryyylim](https://github.com/terryyylim)) - Refactor configurable options and add sphinx docs [\#1174](https://github.com/feast-dev/feast/pull/1174) ([terryyylim](https://github.com/terryyylim)) @@ -456,11 +1153,11 @@ - Ensure consistency of github workflow [\#1157](https://github.com/feast-dev/feast/pull/1157) ([terryyylim](https://github.com/terryyylim)) -## [v0.8.0](https://github.com/feast-dev/feast/tree/v0.8.0) (2020-11-10) +# [v0.8.0](https://github.com/feast-dev/feast/tree/v0.8.0) (2020-11-10) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.7.1...v0.8.0) -**Implemented enhancements:** +### Features - Implement JobService API calls & connect it to SDK [\#1129](https://github.com/feast-dev/feast/pull/1129) ([tsotnet](https://github.com/tsotnet)) - Allow user to specify custom secrets to be mounted on Feast Serving and Feast Core pods [\#1127](https://github.com/feast-dev/feast/pull/1127) ([khorshuheng](https://github.com/khorshuheng)) @@ -498,7 +1195,7 @@ - Add Feature Tables API to Core & Python SDK [\#1019](https://github.com/feast-dev/feast/pull/1019) ([mrzzy](https://github.com/mrzzy)) - Introduce Entity as higher-level concept [\#1014](https://github.com/feast-dev/feast/pull/1014) 
([terryyylim](https://github.com/terryyylim)) -**Fixed bugs:** +### Bug Fixes - Fix stencil client serialization issue [\#1147](https://github.com/feast-dev/feast/pull/1147) ([pyalex](https://github.com/pyalex)) - Deadletter path is being incorrectly joined [\#1144](https://github.com/feast-dev/feast/pull/1144) ([pyalex](https://github.com/pyalex)) @@ -509,7 +1206,7 @@ - Fix java class name validation [\#1084](https://github.com/feast-dev/feast/pull/1084) ([oavdeev](https://github.com/oavdeev)) - Multiple tiny AWS related fixes [\#1083](https://github.com/feast-dev/feast/pull/1083) ([oavdeev](https://github.com/oavdeev)) -**Merged pull requests:** +### Merged Pull Requests - Make created\_timestamp property optional in KafkaSource [\#1146](https://github.com/feast-dev/feast/pull/1146) ([pyalex](https://github.com/pyalex)) - In Streaming E2E Test filter kafka consumers by group id prefix [\#1145](https://github.com/feast-dev/feast/pull/1145) ([pyalex](https://github.com/pyalex)) @@ -558,17 +1255,17 @@ - Refactor Python SDK to remove v1 concepts [\#1023](https://github.com/feast-dev/feast/pull/1023) ([terryyylim](https://github.com/terryyylim)) -## [v0.7.1](https://github.com/feast-dev/feast/tree/v0.7.1) (2020-10-07) +# [v0.7.1](https://github.com/feast-dev/feast/tree/v0.7.1) (2020-10-07) [Full Changelog](https://github.com/feast-dev/feast/compare/sdk/go/v0.7.0...v0.7.1) -**Fixed bugs:** +### Bug Fixes - Provide stable jobName in RowMetrics labels [\#1028](https://github.com/feast-dev/feast/pull/1028) ([pyalex](https://github.com/pyalex)) -## [v0.7.0](https://github.com/feast-dev/feast/tree/v0.7.0) (2020-09-09) +# [v0.7.0](https://github.com/feast-dev/feast/tree/v0.7.0) (2020-09-09) [Full Changelog](https://github.com/feast-dev/feast/compare/sdk/go/v0.6.2...v0.7.0) -**Breaking changes:** +### Breaking Changes - Add request response logging via fluentd [\#961](https://github.com/feast-dev/feast/pull/961) ([terryyylim](https://github.com/terryyylim)) - Run JobController as separate application [\#951](https://github.com/feast-dev/feast/pull/951) ([pyalex](https://github.com/pyalex)) @@ -576,7 +1273,7 @@ - Use JobManager's backend as persistent storage and source of truth [\#903](https://github.com/feast-dev/feast/pull/903) ([pyalex](https://github.com/pyalex)) - Fix invalid characters for project, featureset, entity and features creation [\#976](https://github.com/feast-dev/feast/pull/976) ([terryyylim](https://github.com/terryyylim)) -**Implemented enhancements:** +### Features - Add redis key prefix as an option to Redis cluster [\#975](https://github.com/feast-dev/feast/pull/975) ([khorshuheng](https://github.com/khorshuheng)) - Authentication Support for Java & Go SDKs [\#971](https://github.com/feast-dev/feast/pull/971) ([mrzzy](https://github.com/mrzzy)) @@ -586,7 +1283,7 @@ - Add REST endpoints for Feast UI [\#878](https://github.com/feast-dev/feast/pull/878) ([SwampertX](https://github.com/SwampertX)) - Upgrade Feast dependencies [\#876](https://github.com/feast-dev/feast/pull/876) ([pyalex](https://github.com/pyalex)) -**Fixed bugs:** +### Bug Fixes - Fix Java & Go SDK TLS support [\#986](https://github.com/feast-dev/feast/pull/986) ([mrzzy](https://github.com/mrzzy)) - Fix Python SDK setuptools not supporting tags required for Go SDK to be versioned. [\#983](https://github.com/feast-dev/feast/pull/983) ([mrzzy](https://github.com/mrzzy)) @@ -621,7 +1318,7 @@ - Fix lint version not pulling tags.
[\#999](https://github.com/feast-dev/feast/pull/999) ([mrzzy](https://github.com/mrzzy)) - Call fallback only when there are missing keys [\#1009](https://github.com/feast-dev/feast/pull/1009) ([pyalex](https://github.com/pyalex)) -**Merged pull requests:** +### Merged Pull Requests - Add cryptography to python ci-requirements [\#988](https://github.com/feast-dev/feast/pull/988) ([pyalex](https://github.com/pyalex)) - Allow maps in environment variables in helm charts [\#987](https://github.com/feast-dev/feast/pull/987) ([pyalex](https://github.com/pyalex)) @@ -644,10 +1341,10 @@ - BQ sink produces sample of successful inserts [\#875](https://github.com/feast-dev/feast/pull/875) ([pyalex](https://github.com/pyalex)) - Add Branch and RC Awareness to Version Lint & Fix Semver Regex [\#998](https://github.com/feast-dev/feast/pull/998) ([mrzzy](https://github.com/mrzzy)) -## [v0.6.2](https://github.com/feast-dev/feast/tree/v0.6.2) (2020-08-02) +# [v0.6.2](https://github.com/feast-dev/feast/tree/v0.6.2) (2020-08-02) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.6.1...v0.6.2) -**Implemented enhancements:** +### Features - Redis sink flushes only rows that have more recent eventTimestamp [\#913](https://github.com/feast-dev/feast/pull/913) ([pyalex](https://github.com/pyalex)) - Dataflow runner options: disk type & streaming engine [\#906](https://github.com/feast-dev/feast/pull/906) ([pyalex](https://github.com/pyalex)) @@ -657,40 +1354,40 @@ - Add caching to authorization [\#884](https://github.com/feast-dev/feast/pull/884) ([jmelinav](https://github.com/jmelinav)) - Add Auth header [\#885](https://github.com/feast-dev/feast/pull/885) ([AnujaVane](https://github.com/AnujaVane)) -**Fixed bugs:** +### Bug Fixes - Fix Online Serving unable to retrieve feature data after Feature Set update.
[\#908](https://github.com/feast-dev/feast/pull/908) ([mrzzy](https://github.com/mrzzy)) - Fix Python SDK ingestion for featureset name that exists in multiple projects [\#868](https://github.com/feast-dev/feast/pull/868) ([terryyylim](https://github.com/terryyylim)) - Backport delay in Redis acknowledgement of spec [\#915](https://github.com/feast-dev/feast/pull/915) ([woop](https://github.com/woop)) - Allow unauthenticated access when Authorization is disabled and to Health Probe [\#927](https://github.com/feast-dev/feast/pull/927) ([mrzzy](https://github.com/mrzzy)) -**Merged pull requests:** +### Merged Pull Requests - Upgrade Feast dependencies [\#876](https://github.com/feast-dev/feast/pull/876) ([pyalex](https://github.com/pyalex)) -## [v0.6.1](https://github.com/feast-dev/feast/tree/v0.6.1) (2020-07-17) +# [v0.6.1](https://github.com/feast-dev/feast/tree/v0.6.1) (2020-07-17) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.6.0...v0.6.1) -**Implemented enhancements:** +### Features - Improve parallelization in Redis Sink [\#866](https://github.com/feast-dev/feast/pull/866) ([pyalex](https://github.com/pyalex)) - BQ sink produces sample of successful inserts [\#875](https://github.com/feast-dev/feast/pull/875) ([pyalex](https://github.com/pyalex)) -**Fixed bugs:** +### Bug Fixes - Add IngestionId & EventTimestamp to FeatureRowBatch to calculate lag metric correctly [\#874](https://github.com/feast-dev/feast/pull/874) ([pyalex](https://github.com/pyalex)) - Prevent race condition in BQ sink jobId generation [\#877](https://github.com/feast-dev/feast/pull/877) ([pyalex](https://github.com/pyalex)) -## [v0.6.0](https://github.com/feast-dev/feast/tree/v0.6.0) (2020-07-13) +# [v0.6.0](https://github.com/feast-dev/feast/tree/v0.6.0) (2020-07-13) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.5.1...v0.6.0) -**Breaking changes:** +### Breaking Changes - Compute and write metrics for rows prior to store writes [\#763](https://github.com/feast-dev/feast/pull/763) ([zhilingc](https://github.com/zhilingc)) -**Implemented enhancements:** +### Features - Allow users to compute statistics over retrieved batch datasets [\#799](https://github.com/feast-dev/feast/pull/799) ([zhilingc](https://github.com/zhilingc)) - Replace Keto Authorization with External HTTP Authorization [\#864](https://github.com/feast-dev/feast/pull/864) ([woop](https://github.com/woop)) @@ -708,7 +1405,7 @@ - Authentication and authorization support [\#793](https://github.com/feast-dev/feast/pull/793) ([dr3s](https://github.com/dr3s)) - Add API for listing feature sets using labels [\#785](https://github.com/feast-dev/feast/pull/785) ([terryyylim](https://github.com/terryyylim)) -**Fixed bugs:** +### Bug Fixes - Bypass authentication for metric endpoints [\#862](https://github.com/feast-dev/feast/pull/862) ([woop](https://github.com/woop)) - Python SDK listing of ingestion job fails for featureset reference filter [\#861](https://github.com/feast-dev/feast/pull/861) ([terryyylim](https://github.com/terryyylim)) @@ -728,7 +1425,7 @@ - Fix pipeline options toArgs\(\) returning empty list [\#765](https://github.com/feast-dev/feast/pull/765) ([zhilingc](https://github.com/zhilingc)) - Remove usage of parallel stream for feature value map generation [\#751](https://github.com/feast-dev/feast/pull/751) ([khorshuheng](https://github.com/khorshuheng)) -**Merged pull requests:** +### Merged Pull Requests - Remove Spring Boot from auth tests [\#859](https://github.com/feast-dev/feast/pull/859)
([woop](https://github.com/woop)) - Authentication and Authorization into feast-auth module. [\#856](https://github.com/feast-dev/feast/pull/856) ([jmelinav](https://github.com/jmelinav)) @@ -766,11 +1463,11 @@ [Full Changelog](https://github.com/feast-dev/feast/compare/v0.5.0...v0.5.1) -**Implemented enhancements:** +### Features - Add support for version method in Feast SDK and Core [\#759](https://github.com/feast-dev/feast/pull/759) ([woop](https://github.com/woop)) - Refactor runner configuration, add labels to dataflow options [\#718](https://github.com/feast-dev/feast/pull/718) ([zhilingc](https://github.com/zhilingc)) -**Fixed bugs:** +### Bug Fixes - Fix pipeline options toArgs\(\) returning empty list [\#765](https://github.com/feast-dev/feast/pull/765) ([zhilingc](https://github.com/zhilingc)) - Fix project argument for feature set describe in CLI [\#731](https://github.com/feast-dev/feast/pull/731) ([terryyylim](https://github.com/terryyylim)) - Fix Go and Java SDK Regressions [\#729](https://github.com/feast-dev/feast/pull/729) ([mrzzy](https://github.com/mrzzy)) @@ -778,17 +1475,17 @@ - Restore Feast Java SDK and Ingestion compatibility with Java 8 runtimes [\#722](https://github.com/feast-dev/feast/pull/722) ([ches](https://github.com/ches)) - Python sdk bug fixes [\#723](https://github.com/feast-dev/feast/pull/723) ([zhilingc](https://github.com/zhilingc)) -**Merged pull requests:** +### Merged Pull Requests - Increase Jaeger Tracing coverage [\#719](https://github.com/feast-dev/feast/pull/719) ([terryyylim](https://github.com/terryyylim)) - Recompile golang protos to include new FeatureSetStatus [\#755](https://github.com/feast-dev/feast/pull/755) ([zhilingc](https://github.com/zhilingc)) - Merge Redis cluster connector with Redis connector [\#752](https://github.com/feast-dev/feast/pull/752) ([pyalex](https://github.com/pyalex)) - Remove unused Hibernate dep from Serving [\#721](https://github.com/feast-dev/feast/pull/721) ([ches](https://github.com/ches)) -## [v0.5.0](https://github.com/feast-dev/feast/tree/v0.5.0) (2020-05-19) +# [v0.5.0](https://github.com/feast-dev/feast/tree/v0.5.0) (2020-05-19) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.7...v0.5.0) -**Breaking changes:** +### Breaking Changes - Add .proto to packages of Protobuf generated Java classes [\#700](https://github.com/feast-dev/feast/pull/700) ([woop](https://github.com/woop)) - Add support for feature set updates and remove versions [\#676](https://github.com/feast-dev/feast/pull/676) ([zhilingc](https://github.com/zhilingc)) @@ -796,7 +1493,7 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for more details. 
-**Implemented enhancements:** +### Features - Add general storage API and refactor existing store implementations [\#567](https://github.com/feast-dev/feast/pull/567) ([zhilingc](https://github.com/zhilingc)) - Add support for feature set updates and remove versions [\#676](https://github.com/feast-dev/feast/pull/676) ([zhilingc](https://github.com/zhilingc)) @@ -809,7 +1506,7 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Add feature and feature set labels for metadata [\#536](https://github.com/feast-dev/feast/pull/536) ([imjuanleonard](https://github.com/imjuanleonard)) - Update Python SDK so FeatureSet can import Schema from Tensorflow metadata [\#450](https://github.com/feast-dev/feast/pull/450) ([davidheryanto](https://github.com/davidheryanto)) -**Fixed bugs:** +### Bug Fixes - Add feature set status JOB\_STARTING to denote feature sets waiting for job to get to RUNNING state [\#714](https://github.com/feast-dev/feast/pull/714) ([zhilingc](https://github.com/zhilingc)) - Remove feature set status check for job update requirement [\#708](https://github.com/feast-dev/feast/pull/708) ([khorshuheng](https://github.com/khorshuheng)) @@ -825,7 +1522,7 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Fix Feast Serving not registering its store in Feast Core [\#641](https://github.com/feast-dev/feast/pull/641) ([mrzzy](https://github.com/mrzzy)) - Kafka producer should raise an exception when it fails to connect to broker [\#636](https://github.com/feast-dev/feast/pull/636) ([junhui096](https://github.com/junhui096)) -**Merged pull requests:** +### Merged Pull Requests - Change organization from gojek to feast-dev [\#712](https://github.com/feast-dev/feast/pull/712) ([woop](https://github.com/woop)) - Extract feature set update tests so CI doesn't run it [\#709](https://github.com/feast-dev/feast/pull/709) ([zhilingc](https://github.com/zhilingc)) @@ -859,11 +1556,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Apply a fixed window before writing row metrics [\#590](https://github.com/feast-dev/feast/pull/590) ([davidheryanto](https://github.com/davidheryanto)) - Allow tests to run on non-master branches [\#588](https://github.com/feast-dev/feast/pull/588) ([woop](https://github.com/woop)) -## [v0.4.7](https://github.com/feast-dev/feast/tree/v0.4.7) (2020-03-17) +# [v0.4.7](https://github.com/feast-dev/feast/tree/v0.4.7) (2020-03-17) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.6...v0.4.7) -**Merged pull requests:** +### Merged Pull Requests - Add log4j-web jar to core and serving. [\#498](https://github.com/feast-dev/feast/pull/498) ([Yanson](https://github.com/Yanson)) - Clear all the futures when sync is called. [\#501](https://github.com/feast-dev/feast/pull/501) ([lavkesh](https://github.com/lavkesh)) - Encode feature row before storing in Redis [\#530](https://github.com/feast-dev/feast/pull/530) ([khorshuheng](https://github.com/khorshuheng)) @@ -872,11 +1569,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Parameterize end to end test scripts. 
[\#433](https://github.com/feast-dev/feast/pull/433) ([Yanson](https://github.com/Yanson)) - Replacing Jedis With Lettuce in ingestion and serving [\#485](https://github.com/feast-dev/feast/pull/485) ([lavkesh](https://github.com/lavkesh)) -## [v0.4.6](https://github.com/feast-dev/feast/tree/v0.4.6) (2020-02-26) +# [v0.4.6](https://github.com/feast-dev/feast/tree/v0.4.6) (2020-02-26) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.5...v0.4.6) -**Merged pull requests:** +### Merged Pull Requests - Rename metric name for request latency in feast serving [\#488](https://github.com/feast-dev/feast/pull/488) ([davidheryanto](https://github.com/davidheryanto)) - Allow use of secure gRPC in Feast Python client [\#459](https://github.com/feast-dev/feast/pull/459) ([Yanson](https://github.com/Yanson)) - Extend WriteMetricsTransform in Ingestion to write feature value stats to StatsD [\#486](https://github.com/feast-dev/feast/pull/486) ([davidheryanto](https://github.com/davidheryanto)) @@ -885,11 +1582,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Fail Spotless formatting check before tests execute [\#487](https://github.com/feast-dev/feast/pull/487) ([ches](https://github.com/ches)) - Reduce refresh rate of specification refresh in Serving to 10 seconds [\#481](https://github.com/feast-dev/feast/pull/481) ([woop](https://github.com/woop)) -## [v0.4.5](https://github.com/feast-dev/feast/tree/v0.4.5) (2020-02-14) +# [v0.4.5](https://github.com/feast-dev/feast/tree/v0.4.5) (2020-02-14) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.4...v0.4.5) -**Merged pull requests:** +### Merged Pull Requests - Use bzip2 compressed feature set json as pipeline option [\#466](https://github.com/feast-dev/feast/pull/466) ([khorshuheng](https://github.com/khorshuheng)) - Make redis key creation more determinisitic [\#471](https://github.com/feast-dev/feast/pull/471) ([zhilingc](https://github.com/zhilingc)) - Helm Chart Upgrades [\#458](https://github.com/feast-dev/feast/pull/458) ([Yanson](https://github.com/Yanson)) @@ -900,11 +1597,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Deduplicate example notebooks [\#456](https://github.com/feast-dev/feast/pull/456) ([woop](https://github.com/woop)) - Allow users not to set max age for batch retrieval [\#446](https://github.com/feast-dev/feast/pull/446) ([zhilingc](https://github.com/zhilingc)) -## [v0.4.4](https://github.com/feast-dev/feast/tree/v0.4.4) (2020-01-28) +# [v0.4.4](https://github.com/feast-dev/feast/tree/v0.4.4) (2020-01-28) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.3...v0.4.4) -**Merged pull requests:** +### Merged Pull Requests - Change RedisBackedJobService to use a connection pool [\#439](https://github.com/feast-dev/feast/pull/439) ([zhilingc](https://github.com/zhilingc)) - Update GKE installation and chart values to work with 0.4.3 [\#434](https://github.com/feast-dev/feast/pull/434) ([lgvital](https://github.com/lgvital)) @@ -916,23 +1613,23 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Update basic Feast example to Feast 0.4 [\#424](https://github.com/feast-dev/feast/pull/424) ([woop](https://github.com/woop)) - Introduce datatypes/java module for proto generation [\#391](https://github.com/feast-dev/feast/pull/391) ([ches](https://github.com/ches)) -## [v0.4.3](https://github.com/feast-dev/feast/tree/v0.4.3) (2020-01-08) +# 
[v0.4.3](https://github.com/feast-dev/feast/tree/v0.4.3) (2020-01-08) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.2...v0.4.3) -**Fixed bugs:** +### Bug Fixes - Bugfix for redis ingestion retries throwing NullPointerException on remote runners [\#417](https://github.com/feast-dev/feast/pull/417) ([khorshuheng](https://github.com/khorshuheng)) -## [v0.4.2](https://github.com/feast-dev/feast/tree/v0.4.2) (2020-01-07) +# [v0.4.2](https://github.com/feast-dev/feast/tree/v0.4.2) (2020-01-07) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.1...v0.4.2) -**Fixed bugs:** +### Bug Fixes - Missing argument in error string in ValidateFeatureRowDoFn [\#401](https://github.com/feast-dev/feast/issues/401) -**Merged pull requests:** +### Merged Pull Requests - Define maven revision property when packaging jars in Dockerfile so the images are built successfully [\#410](https://github.com/feast-dev/feast/pull/410) ([davidheryanto](https://github.com/davidheryanto)) - Deduplicate rows in subquery [\#409](https://github.com/feast-dev/feast/pull/409) ([zhilingc](https://github.com/zhilingc)) @@ -943,24 +1640,24 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Use Nexus staging plugin for deployment [\#394](https://github.com/feast-dev/feast/pull/394) ([khorshuheng](https://github.com/khorshuheng)) - Handle retry for redis io flow [\#274](https://github.com/feast-dev/feast/pull/274) ([khorshuheng](https://github.com/khorshuheng)) -## [v0.4.1](https://github.com/feast-dev/feast/tree/v0.4.1) (2019-12-30) +# [v0.4.1](https://github.com/feast-dev/feast/tree/v0.4.1) (2019-12-30) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.4.0...v0.4.1) -**Merged pull requests:** +### Merged Pull Requests - Add project-related commands to CLI [\#397](https://github.com/feast-dev/feast/pull/397) ([zhilingc](https://github.com/zhilingc)) -## [v0.4.0](https://github.com/feast-dev/feast/tree/v0.4.0) (2019-12-28) +# [v0.4.0](https://github.com/feast-dev/feast/tree/v0.4.0) (2019-12-28) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.5...v0.4.0) -**Implemented enhancements:** +### Features - Edit description in feature specification to also reflect in BigQuery schema description. 
[\#239](https://github.com/feast-dev/feast/issues/239) - Allow for disabling of metrics pushing [\#57](https://github.com/feast-dev/feast/issues/57) -**Merged pull requests:** +### Merged Pull Requests - Java SDK release script [\#406](https://github.com/feast-dev/feast/pull/406) ([davidheryanto](https://github.com/davidheryanto)) - Use fixed 'dev' revision for test-e2e-batch [\#395](https://github.com/feast-dev/feast/pull/395) ([davidheryanto](https://github.com/davidheryanto)) @@ -978,62 +1675,62 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Add readiness checks for Feast services in end to end test [\#337](https://github.com/feast-dev/feast/pull/337) ([davidheryanto](https://github.com/davidheryanto)) - Create CHANGELOG.md [\#321](https://github.com/feast-dev/feast/pull/321) ([woop](https://github.com/woop)) -## [v0.3.8](https://github.com/feast-dev/feast/tree/v0.3.8) (2020-06-10) +# [v0.3.8](https://github.com/feast-dev/feast/tree/v0.3.8) (2020-06-10) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.7...v0.3.8) -**Implemented enhancements:** +### Features - v0.3 backport: Add feature and feature set labels [\#737](https://github.com/feast-dev/feast/pull/737) ([ches](https://github.com/ches)) -**Merged pull requests:** +### Merged Pull Requests - v0.3 backport: Add Java coverage reporting [\#734](https://github.com/feast-dev/feast/pull/734) ([ches](https://github.com/ches)) -## [v0.3.7](https://github.com/feast-dev/feast/tree/v0.3.7) (2020-05-01) +# [v0.3.7](https://github.com/feast-dev/feast/tree/v0.3.7) (2020-05-01) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.6...v0.3.7) -**Merged pull requests:** +### Merged Pull Requests - Moved end-to-end test scripts from .prow to infra [\#657](https://github.com/feast-dev/feast/pull/657) ([khorshuheng](https://github.com/khorshuheng)) - Backported \#566 & \#647 to v0.3 [\#654](https://github.com/feast-dev/feast/pull/654) ([ches](https://github.com/ches)) -## [v0.3.6](https://github.com/feast-dev/feast/tree/v0.3.6) (2020-01-03) +# [v0.3.6](https://github.com/feast-dev/feast/tree/v0.3.6) (2020-01-03) -**Merged pull requests:** +### Merged Pull Requests [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.5...v0.3.6) - Add support for file paths for providing entity rows during batch retrieval [\#375](https://github.com/feast-dev/feast/pull/375) ([voonhous](https://github.com/voonhous)) -## [v0.3.5](https://github.com/feast-dev/feast/tree/v0.3.5) (2019-12-26) +# [v0.3.5](https://github.com/feast-dev/feast/tree/v0.3.5) (2019-12-26) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.4...v0.3.5) -**Merged pull requests:** +### Merged Pull Requests - Always set destination table in BigQuery query config in Feast Batch Serving so it can handle large results [\#392](https://github.com/feast-dev/feast/pull/392) ([davidheryanto](https://github.com/davidheryanto)) -## [v0.3.4](https://github.com/feast-dev/feast/tree/v0.3.4) (2019-12-23) +# [v0.3.4](https://github.com/feast-dev/feast/tree/v0.3.4) (2019-12-23) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.3...v0.3.4) -**Merged pull requests:** +### Merged Pull Requests - Make redis key creation more determinisitic [\#380](https://github.com/feast-dev/feast/pull/380) ([zhilingc](https://github.com/zhilingc)) -## [v0.3.3](https://github.com/feast-dev/feast/tree/v0.3.3) (2019-12-18) +# [v0.3.3](https://github.com/feast-dev/feast/tree/v0.3.3) (2019-12-18) [Full 
Changelog](https://github.com/feast-dev/feast/compare/v0.3.2...v0.3.3) -**Implemented enhancements:** +### Features - Added Docker Compose for Feast [\#272](https://github.com/feast-dev/feast/issues/272) - Added ability to check import job status and cancel job through Python SDK [\#194](https://github.com/feast-dev/feast/issues/194) - Added basic customer transactions example [\#354](https://github.com/feast-dev/feast/pull/354) ([woop](https://github.com/woop)) -**Merged pull requests:** +### Merged Pull Requests - Added Prow jobs to automate the release of Docker images and Python SDK [\#369](https://github.com/feast-dev/feast/pull/369) ([davidheryanto](https://github.com/davidheryanto)) - Fixed installation link in README.md [\#368](https://github.com/feast-dev/feast/pull/368) ([Jeffwan](https://github.com/Jeffwan)) @@ -1049,11 +1746,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Added docker-compose file with Jupyter notebook [\#328](https://github.com/feast-dev/feast/pull/328) ([khorshuheng](https://github.com/khorshuheng)) - Added minimal implementation of ingesting Parquet and CSV files [\#327](https://github.com/feast-dev/feast/pull/327) ([voonhous](https://github.com/voonhous)) -## [v0.3.2](https://github.com/feast-dev/feast/tree/v0.3.2) (2019-11-29) +# [v0.3.2](https://github.com/feast-dev/feast/tree/v0.3.2) (2019-11-29) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.1...v0.3.2) -**Merged pull requests:** +### Merged Pull Requests - Fixed incorrect BigQuery schema creation from FeatureSetSpec [\#340](https://github.com/feast-dev/feast/pull/340) ([davidheryanto](https://github.com/davidheryanto)) - Filtered out feature sets that dont share the same source [\#339](https://github.com/feast-dev/feast/pull/339) ([zhilingc](https://github.com/zhilingc)) @@ -1066,17 +1763,17 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Pinned Python SDK to minor versions for dependencies [\#322](https://github.com/feast-dev/feast/pull/322) ([woop](https://github.com/woop)) - Added Auto format to Google style with Spotless [\#317](https://github.com/feast-dev/feast/pull/317) ([ches](https://github.com/ches)) -## [v0.3.1](https://github.com/feast-dev/feast/tree/v0.3.1) (2019-11-25) +# [v0.3.1](https://github.com/feast-dev/feast/tree/v0.3.1) (2019-11-25) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.3.0...v0.3.1) -**Merged pull requests:** +### Merged Pull Requests - Added Prometheus metrics to serving [\#316](https://github.com/feast-dev/feast/pull/316) ([zhilingc](https://github.com/zhilingc)) - Changed default job metrics sink to Statsd [\#315](https://github.com/feast-dev/feast/pull/315) ([zhilingc](https://github.com/zhilingc)) - Fixed module import error in Feast CLI [\#314](https://github.com/feast-dev/feast/pull/314) ([davidheryanto](https://github.com/davidheryanto)) -## [v0.3.0](https://github.com/feast-dev/feast/tree/v0.3.0) (2019-11-19) +# [v0.3.0](https://github.com/feast-dev/feast/tree/v0.3.0) (2019-11-19) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.1.8...v0.3.0) @@ -1091,7 +1788,7 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for * Added job management to Feast Core to manage ingestion/population jobs to remote Feast deployments * Added metric support through Prometheus -**Merged pull requests:** +### Merged Pull Requests - Regenerate go protos [\#313](https://github.com/feast-dev/feast/pull/313) 
([zhilingc](https://github.com/zhilingc)) - Bump chart version to 0.3.0 [\#311](https://github.com/feast-dev/feast/pull/311) ([zhilingc](https://github.com/zhilingc)) @@ -1119,11 +1816,11 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Move storage configuration to serving [\#254](https://github.com/feast-dev/feast/pull/254) ([zhilingc](https://github.com/zhilingc)) - Serving API changes for 0.3 [\#253](https://github.com/feast-dev/feast/pull/253) ([zhilingc](https://github.com/zhilingc)) -## [v0.1.8](https://github.com/feast-dev/feast/tree/v0.1.8) (2019-10-30) +# [v0.1.8](https://github.com/feast-dev/feast/tree/v0.1.8) (2019-10-30) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.1.2...v0.1.8) -**Implemented enhancements:** +### Features - Feast cli config file should be settable by an env var [\#149](https://github.com/feast-dev/feast/issues/149) - Helm chart for deploying feast using Flink as runner [\#64](https://github.com/feast-dev/feast/issues/64) @@ -1132,28 +1829,28 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Change config to yaml [\#51](https://github.com/feast-dev/feast/issues/51) - Ability to pass runner option during ingestion job submission [\#50](https://github.com/feast-dev/feast/issues/50) -**Fixed bugs:** +### Bug Fixes - Fix Print Method in Feast CLI [\#211](https://github.com/feast-dev/feast/issues/211) - Dataflow monitoring by core is failing with incorrect job id [\#153](https://github.com/feast-dev/feast/issues/153) - Feast core crashes without logger set [\#150](https://github.com/feast-dev/feast/issues/150) -**Merged pull requests:** +### Merged Pull Requests - Remove redis transaction [\#280](https://github.com/feast-dev/feast/pull/280) ([pradithya](https://github.com/pradithya)) - Fix tracing to continue from existing trace created by grpc client [\#245](https://github.com/feast-dev/feast/pull/245) ([pradithya](https://github.com/pradithya)) -## [v0.1.2](https://github.com/feast-dev/feast/tree/v0.1.2) (2019-08-23) +# [v0.1.2](https://github.com/feast-dev/feast/tree/v0.1.2) (2019-08-23) [Full Changelog](https://github.com/feast-dev/feast/compare/v0.1.1...v0.1.2) -**Fixed bugs:** +### Bug Fixes - Batch Import, feature with datetime format issue [\#203](https://github.com/feast-dev/feast/issues/203) - Serving not correctly reporting readiness check if there is no activity [\#190](https://github.com/feast-dev/feast/issues/190) - Serving stop periodically reloading feature specification after a while [\#188](https://github.com/feast-dev/feast/issues/188) -**Merged pull requests:** +### Merged Pull Requests - Add `romanwozniak` to prow owners config [\#216](https://github.com/feast-dev/feast/pull/216) ([romanwozniak](https://github.com/romanwozniak)) - Implement filter for create dataset api [\#215](https://github.com/feast-dev/feast/pull/215) ([pradithya](https://github.com/pradithya)) @@ -1177,36 +1874,36 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for - Continuous integration and deployment \(CI/CD\) update [\#183](https://github.com/feast-dev/feast/pull/183) ([davidheryanto](https://github.com/davidheryanto)) - Remove feature specs being able to declare their serving or warehouse stores [\#159](https://github.com/feast-dev/feast/pull/159) ([tims](https://github.com/tims)) -## [v0.1.1](https://github.com/feast-dev/feast/tree/v0.1.1) (2019-04-18) +# [v0.1.1](https://github.com/feast-dev/feast/tree/v0.1.1) (2019-04-18) [Full 
Changelog](https://github.com/feast-dev/feast/compare/v0.1.0...v0.1.1)
-**Fixed bugs:**
+### Bug Fixes
- Fix BigQuery query template to retrieve training data [\#182](https://github.com/feast-dev/feast/pull/182) ([davidheryanto](https://github.com/davidheryanto))
-**Merged pull requests:**
+### Merged Pull Requests
- Add python init files [\#176](https://github.com/feast-dev/feast/pull/176) ([zhilingc](https://github.com/zhilingc))
- Change pypi package from Feast to feast [\#173](https://github.com/feast-dev/feast/pull/173) ([zhilingc](https://github.com/zhilingc))
-## [v0.1.0](https://github.com/feast-dev/feast/tree/v0.1.0) (2019-04-09)
+# [v0.1.0](https://github.com/feast-dev/feast/tree/v0.1.0) (2019-04-09)
[Full Changelog](https://github.com/feast-dev/feast/compare/v0.0.2...v0.1.0)
-**Implemented enhancements:**
+### Features
- Removal of storing historical value of feature in serving storage [\#53](https://github.com/feast-dev/feast/issues/53)
- Remove feature "granularity" and relegate to metadata [\#17](https://github.com/feast-dev/feast/issues/17)
-**Closed issues:**
+### Closed Issues
- Add ability to name an import job [\#167](https://github.com/feast-dev/feast/issues/167)
- Ingestion retrying an invalid FeatureRow endlessly [\#163](https://github.com/feast-dev/feast/issues/163)
- Ability to associate data ingested in Warehouse store to its ingestion job [\#145](https://github.com/feast-dev/feast/issues/145)
- Missing \(Fixing\) unit test for FeatureRowKafkaIO [\#132](https://github.com/feast-dev/feast/issues/132)
-**Merged pull requests:**
+### Merged Pull Requests
- Catch all kind of exception to avoid retrying [\#171](https://github.com/feast-dev/feast/pull/171) ([pradithya](https://github.com/pradithya))
- Integration test [\#170](https://github.com/feast-dev/feast/pull/170) ([zhilingc](https://github.com/zhilingc))
@@ -1234,16 +1931,16 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for more details.
- Coalesce rows [\#89](https://github.com/feast-dev/feast/pull/89) ([tims](https://github.com/tims))
- Remove historical feature in serving store [\#87](https://github.com/feast-dev/feast/pull/87) ([pradithya](https://github.com/pradithya))
-## [v0.0.2](https://github.com/feast-dev/feast/tree/v0.0.2) (2019-03-11)
+# [v0.0.2](https://github.com/feast-dev/feast/tree/v0.0.2) (2019-03-11)
[Full Changelog](https://github.com/feast-dev/feast/compare/v0.0.1...v0.0.2)
-**Implemented enhancements:**
+### Features
- Coalesce FeatureRows for improved "latest" value consistency in serving stores [\#88](https://github.com/feast-dev/feast/issues/88)
- Kafka source [\#22](https://github.com/feast-dev/feast/issues/22)
-**Closed issues:**
+### Closed Issues
- Preload Feast's spec in serving cache [\#151](https://github.com/feast-dev/feast/issues/151)
- Feast csv data upload job [\#137](https://github.com/feast-dev/feast/issues/137)
@@ -1253,16 +1950,16 @@
- Listing resources and finding out system state [\#131](https://github.com/feast-dev/feast/issues/131)
- Reorganise ingestion store classes to match architecture [\#109](https://github.com/feast-dev/feast/issues/109)
-## [v0.0.1](https://github.com/feast-dev/feast/tree/v0.0.1) (2019-02-11)
+# [v0.0.1](https://github.com/feast-dev/feast/tree/v0.0.1) (2019-02-11)
[Full Changelog](https://github.com/feast-dev/feast/compare/ec9def2bbb06dc759538e4424caadd70f548ea64...v0.0.1)
-**Implemented enhancements:**
+### Features
- Spring boot CLI logs show up
as JSON [\#104](https://github.com/feast-dev/feast/issues/104)
- Allow for registering feature that doesn't have warehouse store [\#5](https://github.com/feast-dev/feast/issues/5)
-**Fixed bugs:**
+### Bug Fixes
- Error when submitting large import spec [\#125](https://github.com/feast-dev/feast/issues/125)
- Ingestion is not ignoring unknown feature in streaming source [\#99](https://github.com/feast-dev/feast/issues/99)
@@ -1273,7 +1970,7 @@ See [Feast 0.5 Release Issue](https://github.com/feast-dev/feast/issues/527) for more details.
- \[FlinkRunner\] Core should not follow remote flink runner job to completion [\#21](https://github.com/feast-dev/feast/issues/21)
- Go packages in protos use incorrect repo [\#16](https://github.com/feast-dev/feast/issues/16)
-**Merged pull requests:**
+### Merged Pull Requests
- Disable test during docker image creation [\#129](https://github.com/feast-dev/feast/pull/129) ([pradithya](https://github.com/pradithya))
- Repackage helm chart [\#127](https://github.com/feast-dev/feast/pull/127) ([pradithya](https://github.com/pradithya))
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 95757ccad7..cb17012eea 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,12 +1,46 @@
# Development Guide: Main Feast Repository
-> Please see [Development Guide](https://docs.feast.dev/contributing/development-guide) for project level development instructions.
+> Please see [Development Guide](https://docs.feast.dev/project/development-guide) for project level development instructions.
-### Overview
+## Overview
This guide is targeted at developers looking to contribute to Feast components in the main Feast repository:
-- [Feast Python SDK / CLI](#feast-python-sdk-%2F-cli)
+- [Feast Python SDK / CLI](#feast-python-sdk--cli)
+- [Feast Java Serving](#feast-java-serving)
- [Feast Go Client](#feast-go-client)
-- [Feast Terraform](#feast-terraform)
+
+## Community
+See [Contribution process](https://docs.feast.dev/project/contributing) and [Community](https://docs.feast.dev/community) for details on how to get more involved in the community.
+
+A few quick highlights:
+- [RFCs](https://drive.google.com/drive/u/0/folders/0AAe8j7ZK3sxSUk9PVA)
+- [Community Slack](https://slack.feast.dev/)
+- [Feast Dev Mailing List](https://groups.google.com/g/feast-dev)
+- [Community Calendar](https://calendar.google.com/calendar/u/0?cid=ZTFsZHVhdGM3MDU3YTJucTBwMzNqNW5rajBAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ)
+  - Includes biweekly community calls at 10AM PST
+
+## Making a pull request
+We use the convention that the assignee of a PR is the person with the next action.
+
+This means that often, the assignee may be empty (if no reviewer has been found yet), the reviewer, or the PR writer if there are comments to be addressed.
+
+### Pull request checklist
+A quick list of things to keep in mind as you're making changes:
+- As you make changes
+  - Make your changes in a [forked repo](#forking-the-repo) (instead of making a branch on the main Feast repo)
+  - [Sign your commits](#signing-off-commits) as you go (to avoid DCO checks failing)
+  - [Rebase from master](#incorporating-upstream-changes-from-master) instead of using `git pull` on your PR branch
+  - Install [pre-commit hooks](#pre-commit-hooks) to ensure all the default linters / formatters are run when you push.
+- When you make the PR
+  - Make a pull request from the forked repo you made
+  - Ensure you add a GitHub **label** (i.e. a kind tag on the PR, e.g. `kind/bug` or `kind/housekeeping`) or else checks will fail.
+  - Ensure you leave a release note for any user-facing changes in the PR. A release-note field is generated automatically in the PR description; you can write `NONE` in that field if there are no user-facing changes.
+  - Please run tests locally before submitting a PR (e.g. for Python, the [local integration tests](#local-integration-tests))
+  - Try to keep PRs smaller. This makes them easier to review.
+
+### Forking the repo
+Fork the Feast GitHub repo and clone your fork locally. Then make your changes on a local branch of the fork.
+
+See [Creating a pull request from a fork](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork).
### Pre-commit Hooks
Setup [`pre-commit`](https://pre-commit.com/) to automatically lint and format the codebase on commit:
@@ -14,18 +48,39 @@ Setup [`pre-commit`](https://pre-commit.com/) to automatically lint and format t
2. Install `pre-commit` with `pip` & install pre-push hooks
```sh
pip install pre-commit
-pre-commit install --hook-type pre-push
+pre-commit install --hook-type pre-commit --hook-type pre-push
```
3. On push, the pre-commit hook will run. This runs `make format` and `make lint`.
+### Signing off commits
+> :warning: Warning: using the default integrations with IDEs like VSCode or IntelliJ will not sign commits.
+> When you submit a PR, you'll have to re-sign commits to pass the DCO check.
+
+Use git signoffs to sign your commits. See
+https://docs.github.com/en/github/authenticating-to-github/managing-commit-signature-verification for details.
+
+Then, you can sign off commits with the `-s` flag:
+```
+git commit -s -m "My first commit"
+```
+
+GPG-signing commits with `-S` is optional.
+
+### Incorporating upstream changes from master
+Our preference is to use `git rebase [master]` rather than `git merge`, i.e. `git pull -r`.
+
+Note that this means that if you rebase midway through a PR, you'll have to force push:
+`git push --force-with-lease origin [branch name]`
+
## Feast Python SDK / CLI
### Environment Setup
Setting up your development environment for Feast Python SDK / CLI:
-1. Ensure that you have `make`, Python (3.7 and above) with `pip`, installed.
-2. _Recommended:_ Create a virtual environment to isolate development dependencies to be installed
+1. Ensure that you have Docker installed in your environment. Docker is used to provision service dependencies during testing.
+2. Ensure that you have `make` and Python (3.7 and above) with `pip` installed.
+3. _Recommended:_ Create a virtual environment to isolate development dependencies to be installed
```sh
# create & activate a virtual environment
-python -v venv venv/
+python -m venv venv/
source venv/bin/activate
```
@@ -36,7 +91,7 @@ pip install --upgrade pip
```
4. Install development dependencies for Feast Python SDK / CLI
```sh
-pip install -e "sdk/python[ci]"
+pip install -e "sdk/python[dev]"
```
### Code Style & Linting
@@ -70,13 +125,69 @@ make test-python
> and [no AWS credentials can be accessed](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials) by `boto3`
> - Ensure Feast Python SDK / CLI is not configured with configuration overrides (ie `~/.feast/config` should be empty).
-## Feast Go Client
-:warning: Feast Go Client will move to its own standalone repository in the future.
+### Integration Tests
+There are two sets of tests you can run:
+1. Local integration tests (for faster development)
+2. Full integration tests (requires cloud environment setups)
+
+#### Local integration tests
+To get local integration tests running, you'll need to have Redis set up:
+
+Redis
+1. Install Redis: [Quickstart](https://redis.io/topics/quickstart)
+2. Run `redis-server`
+Now run `make test-python-universal-local`
+
+#### Full integration tests
+To test across clouds, on top of setting up Redis, you also need GCP / AWS / Snowflake set up.
+
+> Note: you can manually control what tests are run today by inspecting
+> [RepoConfiguration](https://github.com/feast-dev/feast/blob/master/sdk/python/tests/integration/feature_repos/repo_configuration.py)
+> and commenting out tests that are added to `DEFAULT_FULL_REPO_CONFIGS`
+
+**GCP**
+1. Install the [Cloud SDK](https://cloud.google.com/sdk/docs/install).
+2. Then log in to gcloud:
+   ```
+   gcloud auth login
+   gcloud auth application-default login
+   ```
+3. Add `export GCLOUD_PROJECT=[your project]` to your .zshrc
+
+**AWS**
+1. TODO(adchia): flesh out setting up AWS login (or create helper script)
+2. Modify `RedshiftDataSourceCreator` to use your credentials
+
+**Snowflake**
+- See https://signup.snowflake.com/
+
+Then run `make test-python-integration`. Note that for Snowflake / GCP / AWS, this will create new temporary tables / datasets.
+
+#### (Experimental) Run full integration tests against containerized services
+Testing across clouds requires existing accounts on GCP / AWS / Snowflake, and may incur costs when using these services.
+
+For this approach to running tests, you'll need to have Docker set up locally: [Get Docker](https://docs.docker.com/get-docker/)
+
+It's possible to run some integration tests against emulated local versions of these services, using ephemeral containers.
+These tests create new temporary tables / datasets locally only, and they are cleaned up when the containers are torn down.
+
+The services with containerized replacements currently implemented are:
+- Datastore
+- DynamoDB
+- Redis
+
+You can run `make test-python-integration-container` to run tests against the containerized versions of dependencies.
+
+
+## Feast Java Serving
+See [Java contributing guide](java/CONTRIBUTING.md)
+
+## Feast Go Client
### Environment Setup
Setting up your development environment for Feast Go SDK:
-1. Ensure the following development tools are installed:
-- Golang, [`protoc` with the Golang & grpc plugins](https://developers.google.com/protocol-buffers/docs/gotutorial#compiling-your-protocol-buffers)
+
+- Install Golang, [`protoc` with the Golang & grpc plugins](https://developers.google.com/protocol-buffers/docs/gotutorial#compiling-your-protocol-buffers)
### Building
Build the Feast Go Client with the `go` toolchain:
@@ -107,12 +218,16 @@ Unit tests for the Feast Go Client can be run as follows:
go test
```
-## Feast on Kubernetes
-:warning: Feast Terraform will move to its own standalone repository in the future.
-
-See the deployment guide of the respective cloud providers for how to work with these deployments:
-- [Helm Deployment on Kubernetes](https://docs.feast.dev/feast-on-kubernetes/getting-started/install-feast/kubernetes-with-helm)
-- [Terraform Deployment on Amazon EKS](https://docs.feast.dev/feast-on-kubernetes/getting-started/install-feast/kubernetes-amazon-eks-with-terraform)
-- [Terraform Deployment on Azure AKS](https://docs.feast.dev/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-terraform)
-- [Terraform Deployment on Google Cloud GKE](https://docs.feast.dev/feast-on-kubernetes/getting-started/install-feast/google-cloud-gke-with-terraform)
-- [Kustomize Deployment on IBM Cloud IKS or OpenShift](https://docs.feast.dev/feast-on-kubernetes/getting-started/install-feast/ibm-cloud-iks-with-kustomize)
+### Testing with GitHub Actions workflows
+* Update master on your fork, then make a pull request against your own fork's master.
+* Enable workflows by going to Actions and clicking `Enable Workflows`.
+  * Pushes will now run your edited workflow YAML file against your test code.
+  * Unfortunately, in order to test any GitHub workflow changes, you must push the code to the branch and see the output in the Actions tab.
+
+## Issues
+* pr-integration-tests workflow is skipped
+  * Add the `ok-to-test` GitHub label.
+* pr-integration-tests errors out with `Error: fatal: invalid refspec '+refs/pull//merge:refs/remotes/pull//merge'`
+  * This is because GitHub Actions cannot resolve the PR number on a fork. Find your PR number in your pull request header and hard-code it into the `uses: actions/checkout@v2` section (i.e. replace `refs/pull/${{ github.event.pull_request.number }}/merge` with `refs/pull/<your PR number>/merge`).
+* AWS/GCP workflow
+  * Currently still cannot test the GCP/AWS workflows without setting up secrets in a forked repository.
diff --git a/Makefile b/Makefile
index b18c5bca2e..41041d7c08 100644
--- a/Makefile
+++ b/Makefile
@@ -15,97 +15,192 @@
#
ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
-PROTO_TYPE_SUBDIRS = core serving types storage
-PROTO_SERVICE_SUBDIRS = core serving
+MVN := mvn -f java/pom.xml ${MAVEN_EXTRA_OPTS}
OS := linux
ifeq ($(shell uname -s), Darwin)
OS = osx
endif
+TRINO_VERSION ?= 376
# General
-format: format-python format-go
+format: format-python format-java format-go
-lint: lint-python lint-go
+lint: lint-python lint-java lint-go
-test: test-python test-go
+test: test-python test-java test-go
protos: compile-protos-go compile-protos-python compile-protos-docs
-build: protos build-docker build-html
-
-install-ci-dependencies: install-python-ci-dependencies install-go-ci-dependencies
+build: protos build-java build-docker
# Python SDK
-install-python-ci-dependencies:
- pip install -e "sdk/python[ci]"
+install-python-ci-dependencies: install-go-proto-dependencies install-go-ci-dependencies
+ cd sdk/python && python -m piptools sync requirements/py$(PYTHON)-ci-requirements.txt
+ cd sdk/python && COMPILE_GO=true python setup.py develop
+
+lock-python-ci-dependencies:
+ cd sdk/python && python -m piptools compile -U --extra ci --output-file requirements/py$(PYTHON)-ci-requirements.txt
package-protos:
 cp -r ${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos
compile-protos-python:
- @$(foreach dir,$(PROTO_TYPE_SUBDIRS),cd ${ROOT_DIR}/protos; python -m grpc_tools.protoc -I.
--grpc_python_out=../sdk/python/feast/protos/ --python_out=../sdk/python/feast/protos/ --mypy_out=../sdk/python/feast/protos/ feast/$(dir)/*.proto;) - @$(foreach dir,$(PROTO_TYPE_SUBDIRS),grep -rli 'from feast.$(dir)' sdk/python/feast/protos | xargs -I@ sed -i.bak 's/from feast.$(dir)/from feast.protos.feast.$(dir)/g' @;) - cd ${ROOT_DIR}/protos; python -m grpc_tools.protoc -I. --python_out=../sdk/python/ --mypy_out=../sdk/python/ tensorflow_metadata/proto/v0/*.proto + cd sdk/python && python setup.py build_python_protos install-python: - python -m pip install -e sdk/python -U --use-deprecated=legacy-resolver + cd sdk/python && python -m piptools sync requirements/py$(PYTHON)-requirements.txt + cd sdk/python && python setup.py develop + +lock-python-dependencies: + cd sdk/python && python -m piptools compile -U --output-file requirements/py$(PYTHON)-requirements.txt + +benchmark-python: + FEAST_USAGE=False IS_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests + +benchmark-python-local: + FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests test-python: - FEAST_USAGE=False pytest -n 8 sdk/python/tests + FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 sdk/python/tests test-python-integration: - FEAST_USAGE=False pytest -n 8 --integration sdk/python/tests + FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests + +test-python-integration-container: + FEAST_USAGE=False IS_TEST=True FEAST_LOCAL_ONLINE_CONTAINER=True python -m pytest -n 8 --integration sdk/python/tests + +test-python-universal-contrib: + PYTHONPATH='.' FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.contrib_repo_configuration FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + +test-python-universal-local: + FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + +test-python-universal: + FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + +test-python-go-server: compile-go-lib + FEAST_USAGE=False IS_TEST=True FEAST_GO_FEATURE_RETRIEVAL=True pytest --integration --goserver sdk/python/tests format-python: # Sort - cd ${ROOT_DIR}/sdk/python; isort feast/ tests/ + cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ # Format - cd ${ROOT_DIR}/sdk/python; black --target-version py37 feast tests + cd ${ROOT_DIR}/sdk/python; python -m black --target-version py37 feast tests lint-python: - cd ${ROOT_DIR}/sdk/python; mypy feast/ tests/ - cd ${ROOT_DIR}/sdk/python; isort feast/ tests/ --check-only - cd ${ROOT_DIR}/sdk/python; flake8 feast/ tests/ - cd ${ROOT_DIR}/sdk/python; black --check feast tests + cd ${ROOT_DIR}/sdk/python; python -m mypy + cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/ --check-only + cd ${ROOT_DIR}/sdk/python; python -m flake8 feast/ tests/ + cd ${ROOT_DIR}/sdk/python; python -m black --check feast tests + +# Java + +install-java-ci-dependencies: + ${MVN} verify clean --fail-never + +format-java: + ${MVN} spotless:apply + +lint-java: + ${MVN} --no-transfer-progress spotless:check + +test-java: + ${MVN} --no-transfer-progress -DskipITs=true test + +test-java-integration: + ${MVN} --no-transfer-progress -Dmaven.javadoc.skip=true -Dgpg.skip -DskipUTs=true clean verify -# Go SDK +test-java-with-coverage: + ${MVN} 
--no-transfer-progress -DskipITs=true test jacoco:report-aggregate + +build-java: + ${MVN} clean verify + +build-java-no-tests: + ${MVN} --no-transfer-progress -Dmaven.javadoc.skip=true -Dgpg.skip -DskipUTs=true -DskipITs=true -Drevision=${REVISION} clean package + +# Trino plugin +start-trino-locally: + cd ${ROOT_DIR}; docker run --detach --rm -p 8080:8080 --name trino -v ${ROOT_DIR}/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/properties/:/etc/catalog/:ro trinodb/trino:${TRINO_VERSION} + sleep 15 + +test-trino-plugin-locally: + cd ${ROOT_DIR}/sdk/python; FULL_REPO_CONFIGS_MODULE=feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests FEAST_USAGE=False IS_TEST=True python -m pytest --integration --universal tests/ + +kill-trino-locally: + cd ${ROOT_DIR}; docker stop trino + +# Go SDK & embedded + +install-go-proto-dependencies: + go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 + go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0 install-go-ci-dependencies: - go get -u github.com/golang/protobuf/protoc-gen-go - go get -u golang.org/x/lint/golint + # ToDo: currently gopy installation doesn't work w/o explicit go get in the next line + # ToDo: there should be a better way to install gopy + go get github.com/go-python/gopy + go install golang.org/x/tools/cmd/goimports + go install github.com/go-python/gopy + python -m pip install pybindgen==0.22.0 -compile-protos-go: - cd ${ROOT_DIR}/protos; protoc -I/usr/local/include -I. --go_out=plugins=grpc,paths=source_relative:../sdk/go/protos/ tensorflow_metadata/proto/v0/*.proto - $(foreach dir,types serving core storage,cd ${ROOT_DIR}/protos; protoc -I/usr/local/include -I. --go_out=plugins=grpc,paths=source_relative:../sdk/go/protos feast/$(dir)/*.proto;) +install-protoc-dependencies: + pip install grpcio-tools==1.44.0 mypy-protobuf==3.1.0 -test-go: - cd ${ROOT_DIR}/sdk/go; go test ./... +compile-protos-go: install-go-proto-dependencies install-protoc-dependencies + cd sdk/python && python setup.py build_go_protos + +compile-go-lib: install-go-proto-dependencies install-go-ci-dependencies + cd sdk/python && python setup.py build_go_lib + +# Needs feast package to setup the feature store +test-go: compile-protos-go + pip install -e "sdk/python[ci]" + go test ./... format-go: - cd ${ROOT_DIR}/sdk/go; gofmt -s -w *.go + gofmt -s -w go/ -lint-go: - cd ${ROOT_DIR}/sdk/go; go vet +lint-go: compile-protos-go + go vet ./go/internal/feast ./go/cmd/server # Docker -build-push-docker: - @$(MAKE) build-docker registry=$(REGISTRY) version=$(VERSION) - @$(MAKE) push-ci-docker registry=$(REGISTRY) version=$(VERSION) - -build-docker: build-ci-docker +build-docker: build-ci-docker build-feature-server-python-aws-docker build-feature-transformation-server-docker build-feature-server-java-docker push-ci-docker: docker push $(REGISTRY)/feast-ci:$(VERSION) +# TODO(adchia): consider removing. This doesn't run successfully right now build-ci-docker: docker build -t $(REGISTRY)/feast-ci:$(VERSION) -f infra/docker/ci/Dockerfile . -build-local-test-docker: - docker build -t feast:local -f infra/docker/tests/Dockerfile . +push-feature-server-python-aws-docker: + docker push $(REGISTRY)/feature-server-python-aws:$$VERSION + +build-feature-server-python-aws-docker: + docker build --build-arg VERSION=$$VERSION \ + -t $(REGISTRY)/feature-server-python-aws:$$VERSION \ + -f sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile . 
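+
+# Usage sketch (hypothetical registry/version values). REGISTRY is a make variable,
+# but this target reads VERSION from the shell environment ($$VERSION), so set both
+# when invoking, e.g.:
+#   REGISTRY=gcr.io/my-project VERSION=0.19.0 make build-feature-server-python-aws-docker
+# The transformation-server and java feature-server targets below use $(VERSION) instead.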
+ +push-feature-transformation-server-docker: + docker push $(REGISTRY)/feature-transformation-server:$(VERSION) + +build-feature-transformation-server-docker: + docker build --build-arg VERSION=$(VERSION) \ + -t $(REGISTRY)/feature-transformation-server:$(VERSION) \ + -f sdk/python/feast/infra/transformation_servers/Dockerfile . + +push-feature-server-java-docker: + docker push $(REGISTRY)/feature-server-java:$(VERSION) + +build-feature-server-java-docker: + docker build --build-arg VERSION=$(VERSION) \ + -t $(REGISTRY)/feature-server-java:$(VERSION) \ + -f java/infra/docker/feature-server/Dockerfile . # Documentation @@ -133,3 +228,6 @@ compile-protos-docs: build-sphinx: compile-protos-python cd $(ROOT_DIR)/sdk/python/docs && $(MAKE) build-api-source + +build-templates: + python infra/scripts/compile-templates.py diff --git a/OWNERS b/OWNERS index 7d42ec60f8..bc7342c3d6 100644 --- a/OWNERS +++ b/OWNERS @@ -1,11 +1,22 @@ +# This file is different from the CODEOWNERS file. +# OWNERS is used by feast-ci-bot to accept commands like `/ok-to-test` and `/lgtm` +# More info at https://www.kubernetes.dev/docs/guide/owners/ approvers: - woop - - jklegar - tsotnet - achals + - adchia + - felixwang9817 + - mavysavydav + - MattDelac + - kevjumba reviewers: - woop - - jklegar - tsotnet - achals - tedhtchang + - adchia + - felixwang9817 + - mavysavydav + - MattDelac + - kevjumba diff --git a/README.md b/README.md index 4fe0c11083..af4df06175 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ + +

@@ -6,7 +8,8 @@
[![unit-tests](https://github.com/feast-dev/feast/actions/workflows/unit_tests.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/unit_tests.yml) -[![integration-tests](https://github.com/feast-dev/feast/actions/workflows/integration_tests.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/integration_tests.yml) +[![integration-tests-and-build](https://github.com/feast-dev/feast/actions/workflows/master_only.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/master_only.yml) +[![java-integration-tests](https://github.com/feast-dev/feast/actions/workflows/java_master_only.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/java_master_only.yml) [![linter](https://github.com/feast-dev/feast/actions/workflows/linter.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/linter.yml) [![Docs Latest](https://img.shields.io/badge/docs-latest-blue.svg)](https://docs.feast.dev/) [![Python API](https://img.shields.io/readthedocs/feast/master?label=Python%20API)](http://rtd.feast.dev/) @@ -19,12 +22,12 @@ Feast is an open source feature store for machine learning. Feast is the fastest Please see our [documentation](https://docs.feast.dev/) for more information about the project. -## Architecture - +## 📐 Architecture +![](docs/assets/feast-marchitecture.png) -The above architecture is the minimal Feast deployment. Want to run the full Feast on Kubernetes? Click [here](https://docs.feast.dev/feast-on-kubernetes/getting-started). +The above architecture is the minimal Feast deployment. Want to run the full Feast on Snowflake/GCP/AWS? Click [here](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws). -## Getting Started +## 🐣 Getting Started ### 1. Install Feast ```commandline @@ -42,7 +45,11 @@ cd my_feature_repo feast apply ``` -### 4. Build a training dataset +### 4. Explore your data in the web UI (experimental) + +![Web UI](ui/sample.png) + +### 5. Build a training dataset ```python from feast import FeatureStore import pandas as pd @@ -61,8 +68,8 @@ entity_df = pd.DataFrame.from_dict({ store = FeatureStore(repo_path=".") training_df = store.get_historical_features( - entity_df=entity_df, - feature_refs = [ + entity_df=entity_df, + features = [ 'driver_hourly_stats:conv_rate', 'driver_hourly_stats:acc_rate', 'driver_hourly_stats:avg_daily_trips' @@ -83,7 +90,7 @@ print(training_df.head()) ``` -### 5. Load feature values into your online store +### 6. Load feature values into your online store ```commandline CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S") feast materialize-incremental $CURRENT_TIME @@ -93,7 +100,7 @@ feast materialize-incremental $CURRENT_TIME Materializing feature view driver_hourly_stats from 2021-04-14 to 2021-04-15 done! ``` -### 6. Read online features at low latency +### 7. 
Read online features at low latency
```python
from pprint import pprint
from feast import FeatureStore
@@ -101,7 +108,7 @@
store = FeatureStore(repo_path=".")
feature_vector = store.get_online_features(
-    feature_refs=[
+    features=[
        'driver_hourly_stats:conv_rate',
        'driver_hourly_stats:acc_rate',
        'driver_hourly_stats:avg_daily_trips'
@@ -123,25 +130,104 @@ pprint(feature_vector)
}
```
-## Important resources
+## 📦 Functionality and Roadmap
+
+The list below contains the functionality that contributors are planning to develop for Feast:
+
+* Items below that are in development (or planned for development) will be indicated in parentheses.
+* We welcome contributions to all items in the roadmap!
+* Want to influence our roadmap and prioritization? Submit your feedback to [this form](https://docs.google.com/forms/d/e/1FAIpQLSfa1nRQ0sKz-JEFnMMCi4Jseag\_yDssO\_3nV9qMfxfrkil-wA/viewform).
+* Want to speak to a Feast contributor? We are more than happy to jump on a call. Please schedule a time using [Calendly](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team).
+
+* **Data Sources**
+  * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake)
+  * [x] [Redshift source](https://docs.feast.dev/reference/data-sources/redshift)
+  * [x] [BigQuery source](https://docs.feast.dev/reference/data-sources/bigquery)
+  * [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file)
+  * [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure)
+  * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive)
+  * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres)
+  * [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark)
+  * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push))
+  * [ ] HTTP source
+* **Offline Stores**
+  * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake)
+  * [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift)
+  * [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery)
+  * [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure)
+  * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive)
+  * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres)
+  * [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino)
+  * [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark)
+  * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file)
+  * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store)
+* **Online Stores**
+  * [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb)
+  * [x] [Redis](https://docs.feast.dev/reference/online-stores/redis)
+  * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore)
+  * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite)
+  * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure)
+  * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres)
+  * [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store)
+  * [ ] Bigtable (in progress)
+  * [ ] Cassandra
+* **Streaming**
+  * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/creating-a-custom-provider)
* [x] [Push based streaming data ingestion](https://docs.feast.dev/reference/data-sources/push.md) + * [ ] Streaming ingestion on AWS + * [ ] Streaming ingestion on GCP +* **Feature Engineering** + * [x] On-demand Transformations (Alpha release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) + * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Streaming transformation +* **Deployments** + * [x] AWS Lambda (Alpha release. See [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) + * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) + * [ ] Cloud Run + * [ ] KNative +* **Feature Serving** + * [x] Python Client + * [x] REST Feature Server (Python) (Alpha release. See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) + * [x] gRPC Feature Server (Java) (See [#1497](https://github.com/feast-dev/feast/issues/1497)) + * [x] Push API + * [ ] Java Client + * [ ] Go Client + * [ ] Delete API + * [ ] Feature Logging (for training) +* **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** + * [x] Data profiling and validation (Great Expectations) + * [ ] Training-serving skew detection (in progress) + * [ ] Metric production + * [ ] Drift detection +* **Feature Discovery and Governance** + * [x] Python SDK for browsing feature registry + * [x] CLI for browsing feature registry + * [x] Model-centric feature tracking (feature services) + * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) + * [x] Feast Web UI (alpha) + * [ ] REST API for browsing feature registry + * [ ] Feature versioning + + +## 🎓 Important Resources Please refer to the official documentation at [Documentation](https://docs.feast.dev/) - * [Quickstart](https://docs.feast.dev/quickstart) - * [Roadmap](https://docs.feast.dev/roadmap) - * [Feast on Kubernetes](https://docs.feast.dev/feast-on-kubernetes/getting-started) + * [Quickstart](https://docs.feast.dev/getting-started/quickstart) + * [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview) + * [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) * [Change Log](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md) * [Slack (#Feast)](https://slack.feast.dev/) -## Contributing +## 👋 Contributing Feast is a community project and is still under active development. Please have a look at our contributing and development guides if you want to contribute to the project: -- [Contribution Process for Feast](https://docs.feast.dev/contributing/contributing) -- [Development Guide for Feast](https://docs.feast.dev/contributing/development-guide) +- [Contribution Process for Feast](https://docs.feast.dev/project/contributing) +- [Development Guide for Feast](https://docs.feast.dev/project/development-guide) - [Development Guide for the Main Feast Repository](./CONTRIBUTING.md) -## Contributors ✨ +## ✨ Contributors Thanks goes to these incredible people:
- + \ No newline at end of file diff --git a/docs/docs/.gitbook/assets/basic-architecture-diagram.svg b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1) (1).svg similarity index 100% rename from docs/docs/.gitbook/assets/basic-architecture-diagram.svg rename to docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1) (1).svg diff --git a/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1) (2).svg b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1) (2).svg new file mode 100644 index 0000000000..b707f49046 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1).svg b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1).svg new file mode 100644 index 0000000000..b707f49046 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (2).svg b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (2).svg new file mode 100644 index 0000000000..b707f49046 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram (3) (3) (3) (3) (3) (3) (1) (1) (1) (2) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (1).svg b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (1).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (2).svg b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (2).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (3).svg b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (3).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1).svg b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (2).svg 
b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (2).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (3).svg b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (3).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (4) (4) (4) (4) (4) (4) (4) (4) (2) (1) (1) (1) (1) (2) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/colab_logo_32px.png b/docs/.gitbook/assets/colab_logo_32px.png new file mode 100644 index 0000000000..880d31b495 Binary files /dev/null and b/docs/.gitbook/assets/colab_logo_32px.png differ diff --git a/docs/.gitbook/assets/data-systems-fraud-2x.jpg b/docs/.gitbook/assets/data-systems-fraud-2x.jpg new file mode 100644 index 0000000000..f5a0dd450b Binary files /dev/null and b/docs/.gitbook/assets/data-systems-fraud-2x.jpg differ diff --git a/docs/.gitbook/assets/datastore_online_example (1).png b/docs/.gitbook/assets/datastore_online_example (1).png new file mode 100644 index 0000000000..9c9b644d66 Binary files /dev/null and b/docs/.gitbook/assets/datastore_online_example (1).png differ diff --git a/docs/.gitbook/assets/datastore_online_example.png b/docs/.gitbook/assets/datastore_online_example.png new file mode 100644 index 0000000000..9c9b644d66 Binary files /dev/null and b/docs/.gitbook/assets/datastore_online_example.png differ diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (1).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (1).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (2).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (2).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (3).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (3).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (4).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (4).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (4).svg @@ -0,0 +1 @@ + \ No newline at end of file 
diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (5).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (5).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (5).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (6).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (6).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1) (6).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (2).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (2).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (3).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (3).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (4).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (4).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (4).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (5).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (5).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (5).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (6).svg b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (6).svg new file mode 100644 index 0000000000..7335c131c4 --- /dev/null +++ b/docs/.gitbook/assets/feast-architecture-diagrams (1) (1) (1) (2) (3) (4) (3) (1) (1) (1) (1) (2) (6).svg @@ -0,0 +1 @@ + \ No 
newline at end of file diff --git a/docs/docs/.gitbook/assets/feast-docs-overview-diagram-2.svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (1).svg similarity index 100% rename from docs/docs/.gitbook/assets/feast-docs-overview-diagram-2.svg rename to docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (1).svg diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (2).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (2).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (3).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (3).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (4).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (4).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (4).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (5).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (5).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1) (5).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (2).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (2).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (3).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (3).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (4).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (4).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (4).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (5).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (5).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (5) (1) (2) (5).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) 
(1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (1).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (1).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (2).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (2).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (3).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (3).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (4).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (4).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (5).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (5).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (5).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (6).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (6).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1) (6).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (1).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (2).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (2).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (2).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (3).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (3).png new 
file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (3).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (4).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (4).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (4).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (5).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (5).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (5).png differ diff --git a/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (6).png b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (6).png new file mode 100644 index 0000000000..28efa1206a Binary files /dev/null and b/docs/.gitbook/assets/feast-landing-page-blog-post-page-5 (1) (1) (1) (2) (2) (5) (7) (7) (3) (2) (6).png differ diff --git a/docs/.gitbook/assets/feast-marchitecture-211014.png b/docs/.gitbook/assets/feast-marchitecture-211014.png new file mode 100644 index 0000000000..7ae82da021 Binary files /dev/null and b/docs/.gitbook/assets/feast-marchitecture-211014.png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (1).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (1).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (2).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (2).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (3).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (3).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (4).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (4).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (5).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (5).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and 
b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (5).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (6).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (6).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1) (6).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (1).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (4).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (4).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (4).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (5).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (5).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (5).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (6).png differ diff --git a/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (7).png b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (7).png new file mode 100644 index 0000000000..e6de77dde9 Binary files /dev/null and b/docs/.gitbook/assets/feast-on-aws-3- (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (7).png differ diff --git 
a/docs/.gitbook/assets/feast_fraudlent_architecture.png b/docs/.gitbook/assets/feast_fraudlent_architecture.png new file mode 100644 index 0000000000..a2fef58e8d Binary files /dev/null and b/docs/.gitbook/assets/feast_fraudlent_architecture.png differ diff --git a/docs/.gitbook/assets/github-mark-32px.png b/docs/.gitbook/assets/github-mark-32px.png new file mode 100644 index 0000000000..8b25551a97 Binary files /dev/null and b/docs/.gitbook/assets/github-mark-32px.png differ diff --git a/docs/.gitbook/assets/high_level_hierarchy_redis.png b/docs/.gitbook/assets/high_level_hierarchy_redis.png new file mode 100644 index 0000000000..8227a9450b Binary files /dev/null and b/docs/.gitbook/assets/high_level_hierarchy_redis.png differ diff --git a/docs/.gitbook/assets/image (10).png b/docs/.gitbook/assets/image (10).png new file mode 100644 index 0000000000..9cf06044d0 Binary files /dev/null and b/docs/.gitbook/assets/image (10).png differ diff --git a/docs/.gitbook/assets/image (19).png b/docs/.gitbook/assets/image (19).png new file mode 100644 index 0000000000..37de128ea6 Binary files /dev/null and b/docs/.gitbook/assets/image (19).png differ diff --git a/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (1).png b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (1).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (2).png b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (2).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1).png b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (1).png differ diff --git a/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (2).png b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (2).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2) (2).png differ diff --git a/docs/.gitbook/assets/image (20).png b/docs/.gitbook/assets/image (20).png new file mode 100644 index 0000000000..4c30e96227 Binary files /dev/null and b/docs/.gitbook/assets/image (20).png differ diff --git a/docs/.gitbook/assets/image (21).png b/docs/.gitbook/assets/image (21).png new file mode 100644 index 0000000000..712271a529 Binary files /dev/null and b/docs/.gitbook/assets/image (21).png differ diff --git a/docs/.gitbook/assets/image (22).png b/docs/.gitbook/assets/image (22).png new file mode 100644 index 0000000000..1ed8358266 Binary files /dev/null and b/docs/.gitbook/assets/image (22).png differ diff --git a/docs/.gitbook/assets/image (23).png b/docs/.gitbook/assets/image (23).png new file mode 100644 index 0000000000..f6a96bee66 Binary files /dev/null and b/docs/.gitbook/assets/image (23).png differ diff --git a/docs/.gitbook/assets/image 
(24).png b/docs/.gitbook/assets/image (24).png new file mode 100644 index 0000000000..31dce0c62e Binary files /dev/null and b/docs/.gitbook/assets/image (24).png differ diff --git a/docs/.gitbook/assets/image (25).png b/docs/.gitbook/assets/image (25).png new file mode 100644 index 0000000000..4546476ad3 Binary files /dev/null and b/docs/.gitbook/assets/image (25).png differ diff --git a/docs/.gitbook/assets/image (26).png b/docs/.gitbook/assets/image (26).png new file mode 100644 index 0000000000..f0aeb9cebf Binary files /dev/null and b/docs/.gitbook/assets/image (26).png differ diff --git a/docs/.gitbook/assets/image (27).png b/docs/.gitbook/assets/image (27).png new file mode 100644 index 0000000000..25de15c266 Binary files /dev/null and b/docs/.gitbook/assets/image (27).png differ diff --git a/docs/.gitbook/assets/image (28).png b/docs/.gitbook/assets/image (28).png new file mode 100644 index 0000000000..fd99986480 Binary files /dev/null and b/docs/.gitbook/assets/image (28).png differ diff --git a/docs/.gitbook/assets/image (29).png b/docs/.gitbook/assets/image (29).png new file mode 100644 index 0000000000..ae51a9c072 Binary files /dev/null and b/docs/.gitbook/assets/image (29).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (1).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (1).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (2).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (2).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (3).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (3).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (1).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (2).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (2).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (2).png differ diff --git a/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (3).png b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (3).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (2) (2) (4) (4) (4) (4) (4) (4) (3) (1) (2) (3).png differ diff --git a/docs/.gitbook/assets/image (30).png b/docs/.gitbook/assets/image (30).png new file mode 100644 index 0000000000..6c91826a63 Binary files /dev/null and b/docs/.gitbook/assets/image (30).png 
differ diff --git a/docs/.gitbook/assets/image (31).png b/docs/.gitbook/assets/image (31).png new file mode 100644 index 0000000000..94853e3109 Binary files /dev/null and b/docs/.gitbook/assets/image (31).png differ diff --git a/docs/.gitbook/assets/image (32).png b/docs/.gitbook/assets/image (32).png new file mode 100644 index 0000000000..82ba62ee07 Binary files /dev/null and b/docs/.gitbook/assets/image (32).png differ diff --git a/docs/.gitbook/assets/image (33).png b/docs/.gitbook/assets/image (33).png new file mode 100644 index 0000000000..491cbb00da Binary files /dev/null and b/docs/.gitbook/assets/image (33).png differ diff --git a/docs/.gitbook/assets/image (34).png b/docs/.gitbook/assets/image (34).png new file mode 100644 index 0000000000..3e03757900 Binary files /dev/null and b/docs/.gitbook/assets/image (34).png differ diff --git a/docs/.gitbook/assets/image (35).png b/docs/.gitbook/assets/image (35).png new file mode 100644 index 0000000000..92508a09cd Binary files /dev/null and b/docs/.gitbook/assets/image (35).png differ diff --git a/docs/.gitbook/assets/image (36).png b/docs/.gitbook/assets/image (36).png new file mode 100644 index 0000000000..adce5fd967 Binary files /dev/null and b/docs/.gitbook/assets/image (36).png differ diff --git a/docs/.gitbook/assets/image (37).png b/docs/.gitbook/assets/image (37).png new file mode 100644 index 0000000000..205d326c36 Binary files /dev/null and b/docs/.gitbook/assets/image (37).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (1).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (1).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (2).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (2).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (3).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (3).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (4).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (4).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (5).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (5).png new file mode 100644 index 0000000000..cd77f27cc4 
Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (5).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (6).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (6).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1) (6).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (1).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (2).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (2).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (2).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (3).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (3).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (3).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (4).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (4).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (4).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (5).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (5).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (5).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (6).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (6).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (2) (6).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (6).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (6).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) 
(2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (6).png differ diff --git a/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (7).png b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (7).png new file mode 100644 index 0000000000..cd77f27cc4 Binary files /dev/null and b/docs/.gitbook/assets/image (4) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (2) (3) (3) (3) (3) (7).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (1).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (1).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (2).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (2).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (3).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (3).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (4).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (4).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (5).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (5).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (5).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (6).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (6).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (6).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (7).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (7).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1) (7).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (1).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) 
(2) (1) (2) (2).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (2).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (2).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (3).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (3).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (3).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (4).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (4).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (4).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (5).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (5).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (5).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (6).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (6).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (6).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (7).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (7).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2) (7).png differ diff --git a/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (8).png b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (8).png new file mode 100644 index 0000000000..49670e2005 Binary files /dev/null and b/docs/.gitbook/assets/image (6) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (8).png differ diff --git a/docs/.gitbook/assets/image (9).png b/docs/.gitbook/assets/image (9).png new file mode 100644 index 0000000000..79dbc2679f Binary files /dev/null and b/docs/.gitbook/assets/image (9).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (1).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (1).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (1).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (2).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (2).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (3).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (3).png new file mode 
100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (4).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (4).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (5).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (5).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (5).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (6).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (6).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (6).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (7).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (7).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1) (7).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (1).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (2).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (2).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (2).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (3).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (3).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (3).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (4).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (4).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (4).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (5).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (5).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (5).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (6).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (6).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and 
b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (6).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (7).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (7).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (2) (7).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (7).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (7).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (7).png differ diff --git a/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (8).png b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (8).png new file mode 100644 index 0000000000..331a090d71 Binary files /dev/null and b/docs/.gitbook/assets/point_in_time_join (1) (2) (2) (3) (3) (3) (3) (8).png differ diff --git a/docs/.gitbook/assets/redis_online_example (1).png b/docs/.gitbook/assets/redis_online_example (1).png new file mode 100644 index 0000000000..ac426770ba Binary files /dev/null and b/docs/.gitbook/assets/redis_online_example (1).png differ diff --git a/docs/.gitbook/assets/redis_online_example.png b/docs/.gitbook/assets/redis_online_example.png new file mode 100644 index 0000000000..ac426770ba Binary files /dev/null and b/docs/.gitbook/assets/redis_online_example.png differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (1).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (1).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (1).jpg differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (2).jpg differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (3).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (3).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1) (3).jpg differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (1).jpg differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) 
(3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (2).jpg differ diff --git a/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (3).jpg b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (3).jpg new file mode 100644 index 0000000000..b92ec6fed7 Binary files /dev/null and b/docs/.gitbook/assets/rsz_untitled23 (2) (2) (2) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (3) (2) (3).jpg differ diff --git a/docs/.gitbook/assets/screen-shot-2021-08-23-at-2.35.18-pm.png b/docs/.gitbook/assets/screen-shot-2021-08-23-at-2.35.18-pm.png new file mode 100644 index 0000000000..b2961c773e Binary files /dev/null and b/docs/.gitbook/assets/screen-shot-2021-08-23-at-2.35.18-pm.png differ diff --git a/docs/assets/statistics-sources.png b/docs/.gitbook/assets/statistics-sources (2) (1) (1).png similarity index 100% rename from docs/assets/statistics-sources.png rename to docs/.gitbook/assets/statistics-sources (2) (1) (1).png diff --git a/docs/.gitbook/assets/statistics-sources (2) (1) (2).png b/docs/.gitbook/assets/statistics-sources (2) (1) (2).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (1) (2).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (1) (3).png b/docs/.gitbook/assets/statistics-sources (2) (1) (3).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (1) (3).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (1) (4).png b/docs/.gitbook/assets/statistics-sources (2) (1) (4).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (1) (4).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (1).png b/docs/.gitbook/assets/statistics-sources (2) (1).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (1).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (2).png b/docs/.gitbook/assets/statistics-sources (2) (2).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (2).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (3).png b/docs/.gitbook/assets/statistics-sources (2) (3).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (3).png differ diff --git a/docs/.gitbook/assets/statistics-sources (2) (4).png b/docs/.gitbook/assets/statistics-sources (2) (4).png new file mode 100644 index 0000000000..02be233968 Binary files /dev/null and b/docs/.gitbook/assets/statistics-sources (2) (4).png differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (1).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (1).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (1).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (2).jpg 
b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (2).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (2).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (3).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (3).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (3).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (4).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (4).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (4).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (5).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (5).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (5).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (6).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (6).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (6).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (7).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (7).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1) (7).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (1).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (2).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (2).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (2).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (3).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (3).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (3).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (4).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (4).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and 
b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (4).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (5).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (5).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (5).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (6).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (6).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (6).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (7).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (7).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (2) (7).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (7).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (7).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (7).jpg differ diff --git a/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (8).jpg b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (8).jpg new file mode 100644 index 0000000000..93d010406b Binary files /dev/null and b/docs/.gitbook/assets/untitled-25-1- (2) (2) (2) (3) (3) (3) (3) (3) (3) (1) (8).jpg differ diff --git a/docs/README.md b/docs/README.md index 28e671e695..f8b9af3c32 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,19 +2,19 @@ ## What is Feast? -Feast \(**Fea**ture **St**ore\) is an operational data system for managing and serving machine learning features to models in production. +Feast (**Fea**ture **St**ore) is an operational data system for managing and serving machine learning features to models in production. Feast is able to serve feature data to models from a low-latency online store (for real-time prediction) or from an offline store (for scale-out batch scoring or model training). -![](.gitbook/assets/feast_hero_010.png) +![](assets/feast-marchitecture.png) ## Problems Feast Solves -**Models need consistent access to data:** ML systems built on traditional data infrastructure are often coupled to databases, object stores, streams, and files. A result of this coupling, however, is that any change in data infrastructure may break dependent ML systems. Another challenge is that dual implementations of data retrieval for training and serving can lead to inconsistencies in data, which in turn can lead to training-serving skew. +**Models need consistent access to data:** Machine Learning (ML) systems built on traditional data infrastructure are often coupled to databases, object stores, streams, and files. A result of this coupling, however, is that any change in data infrastructure may break dependent ML systems. Another challenge is that dual implementations of data retrieval for training and serving can lead to inconsistencies in data, which in turn can lead to training-serving skew. 
Feast decouples your models from your data infrastructure by providing a single data access layer that abstracts feature storage from feature retrieval. Feast also provides a consistent means of referencing feature data for retrieval, and therefore ensures that models remain portable when moving from training to serving. **Deploying new features into production is difficult:** Many ML teams consist of members with different objectives. Data scientists, for example, aim to deploy features into production as soon as possible, while engineers want to ensure that production systems remain stable. These differing objectives can create an organizational friction that slows time-to-market for new features. -Feast addresses this friction by providing both a centralized registry to which data scientists can publish features, and a battle-hardened serving layer. Together, these enable non-engineering teams to ship features into production with minimal oversight. +Feast addresses this friction by providing both a centralized registry to which data scientists can publish features and a battle-hardened serving layer. Together, these enable non-engineering teams to ship features into production with minimal oversight. **Models need point-in-time correct data:** ML models in production require a view of data consistent with the one on which they are trained, otherwise the accuracy of these models could be compromised. Despite this need, many data science projects suffer from inconsistencies introduced by future feature values being leaked to models during training. @@ -22,7 +22,7 @@ Feast solves the challenge of data leakage by providing point-in-time correct fe **Features aren't reused across projects:** Different teams within an organization are often unable to reuse features across projects. The siloed nature of development and the monolithic design of end-to-end ML systems contribute to duplication of feature creation and usage across teams and projects. -Feast addresses this problem by introducing feature reuse through a centralized system \(a registry\). This registry enables multiple teams working on different projects not only to contribute features, but also to reuse these same features. With Feast, data scientists can start new ML projects by selecting previously engineered features from a centralized registry, and are no longer required to develop new features for each project. +Feast addresses this problem by introducing feature reuse through a centralized registry. This registry enables multiple teams working on different projects not only to contribute features, but also to reuse these same features. With Feast, data scientists can start new ML projects by selecting previously engineered features from a centralized registry, and are no longer required to develop new features for each project. ## Problems Feast does not yet solve @@ -30,27 +30,28 @@ Feast addresses this problem by introducing feature reuse through a centralized **Feature discovery:** We also aim for Feast to include a first-class user interface for exploring and discovering entities and features. -**‌Feature validation:** We additionally aim for Feast to improve support for statistics generation of feature data and subsequent validation of these statistics. Current support is limited. +**Feature validation:** We additionally aim for Feast to improve support for statistics generation of feature data and subsequent validation of these statistics. Current support is limited. 
## What Feast is not -[**ETL**](https://en.wikipedia.org/wiki/Extract,_transform,_load) **or** [**ELT**](https://en.wikipedia.org/wiki/Extract,_load,_transform) **system:** Feast is not \(and does not plan to become\) a general purpose data transformation or pipelining system. Feast plans to include a light-weight feature engineering toolkit, but we encourage teams to integrate Feast with upstream ETL/ELT systems that are specialized in transformation. +[**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) **or** [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system:** Feast is not (and does not plan to become) a general purpose data transformation or pipelining system. Feast plans to include a light-weight feature engineering toolkit, but we encourage teams to integrate Feast with upstream ETL/ELT systems that are specialized in transformation. -**Data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse \(or other data sources\) to models in production. +**Data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse (or other data sources) to models in production. **Data catalog:** Feast is not a general purpose data catalog for your organization. Feast is purely focused on cataloging features for use in ML pipelines or systems, and only to the extent of facilitating the reuse of features. ## How can I get started? {% hint style="info" %} -The best way to learn Feast is to use it. Head over to our [Quickstart](quickstart.md) and try it out! +The best way to learn Feast is to use it. Head over to our [Quickstart](getting-started/quickstart.md) and try it out! {% endhint %} Explore the following resources to get started with Feast: -* [Quickstart](quickstart.md) is the fastest way to get started with Feast -* [Getting started ](getting-started/)provides a step-by-step guide to using Feast. -* [Concepts](concepts/data-model-and-concepts.md#concepts) describes all important Feast API concepts. +* [Quickstart](getting-started/quickstart.md) is the fastest way to get started with Feast +* [Concepts](getting-started/concepts/) describes all important Feast API concepts +* [Architecture](getting-started/architecture-and-components/) describes Feast's overall architecture. +* [Tutorials](tutorials/tutorials-overview.md) shows full examples of using Feast in machine learning applications. +* [Running Feast with Snowflake/GCP/AWS](how-to-guides/feast-snowflake-gcp-aws/) provides a more in-depth guide to using Feast. * [Reference](reference/feast-cli-commands.md) contains detailed API and design documents. -* [Contributing](contributing/contributing.md) contains resources for anyone who wants to contribute to Feast. - +* [Contributing](project/contributing.md) contains resources for anyone who wants to contribute to Feast. 
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index b5c05f625c..11e20ab831 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -1,105 +1,99 @@ # Table of contents * [Introduction](README.md) -* [Quickstart](quickstart.md) -* [Getting started](getting-started/README.md) - * [Install Feast](getting-started/install-feast.md) - * [Create a feature repository](getting-started/create-a-feature-repository.md) - * [Deploy a feature store](getting-started/deploy-a-feature-store.md) - * [Build a training dataset](getting-started/build-a-training-dataset.md) - * [Load data into the online store](getting-started/load-data-into-the-online-store.md) - * [Read features from the online store](getting-started/read-features-from-the-online-store.md) * [Community](community.md) * [Roadmap](roadmap.md) * [Changelog](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md) -## Concepts +## Getting started -* [Overview](concepts/overview.md) -* [Feature view](concepts/feature-view.md) -* [Data model](concepts/data-model-and-concepts.md) -* [Online store](concepts/online-store.md) -* [Offline store](concepts/offline-store.md) -* [Provider](concepts/provider.md) -* [Architecture](concepts/architecture-and-components.md) +* [Quickstart](getting-started/quickstart.md) +* [Concepts](getting-started/concepts/README.md) + * [Overview](getting-started/concepts/overview.md) + * [Data source](getting-started/concepts/data-source.md) + * [Entity](getting-started/concepts/entity.md) + * [Feature view](getting-started/concepts/feature-view.md) + * [Feature service](getting-started/concepts/feature-service.md) + * [Feature retrieval](getting-started/concepts/feature-retrieval.md) + * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) + * [Dataset](getting-started/concepts/dataset.md) +* [Architecture](getting-started/architecture-and-components/README.md) + * [Overview](getting-started/architecture-and-components/overview.md) + * [Feature repository](getting-started/architecture-and-components/feature-repository.md) + * [Registry](getting-started/architecture-and-components/registry.md) + * [Offline store](getting-started/architecture-and-components/offline-store.md) + * [Online store](getting-started/architecture-and-components/online-store.md) + * [Provider](getting-started/architecture-and-components/provider.md) +* [Third party integrations](getting-started/third-party-integrations.md) +* [FAQ](getting-started/faq.md) + +## Tutorials + +* [Overview](tutorials/tutorials-overview.md) +* [Driver ranking](tutorials/driver-ranking-with-feast.md) +* [Fraud detection on GCP](tutorials/fraud-detection.md) +* [Real-time credit scoring on AWS](tutorials/real-time-credit-scoring-on-aws.md) +* [Driver stats on Snowflake](tutorials/driver-stats-on-snowflake.md) +* [Validating historical features with Great Expectations](tutorials/validating-historical-features.md) + +## How-to Guides + +* [Running Feast with Snowflake/GCP/AWS](how-to-guides/feast-snowflake-gcp-aws/README.md) + * [Install Feast](how-to-guides/feast-snowflake-gcp-aws/install-feast.md) + * [Create a feature repository](how-to-guides/feast-snowflake-gcp-aws/create-a-feature-repository.md) + * [Deploy a feature store](how-to-guides/feast-snowflake-gcp-aws/deploy-a-feature-store.md) + * [Build a training dataset](how-to-guides/feast-snowflake-gcp-aws/build-a-training-dataset.md) + * [Load data into the online store](how-to-guides/feast-snowflake-gcp-aws/load-data-into-the-online-store.md) + * [Read features from the online 
store](how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md) +* [Running Feast in production](how-to-guides/running-feast-in-production.md) +* [Deploying a Java feature server on Kubernetes](how-to-guides/fetching-java-features-k8s.md) +* [Upgrading from Feast 0.9](https://docs.google.com/document/u/1/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit) +* [Adding a custom provider](how-to-guides/creating-a-custom-provider.md) +* [Adding a new online store](how-to-guides/adding-support-for-a-new-online-store.md) +* [Adding a new offline store](how-to-guides/adding-a-new-offline-store.md) +* [Adding or reusing tests](how-to-guides/adding-or-reusing-tests.md) ## Reference * [Data sources](reference/data-sources/README.md) - * [BigQuery](reference/data-sources/bigquery.md) * [File](reference/data-sources/file.md) + * [Snowflake](reference/data-sources/snowflake.md) + * [BigQuery](reference/data-sources/bigquery.md) + * [Redshift](reference/data-sources/redshift.md) + * [Spark](reference/data-sources/spark.md) + * [Push](reference/data-sources/push.md) * [Offline stores](reference/offline-stores/README.md) * [File](reference/offline-stores/file.md) - * [BigQuery](reference/offline-stores/untitled.md) + * [Snowflake](reference/offline-stores/snowflake.md) + * [BigQuery](reference/offline-stores/bigquery.md) + * [Redshift](reference/offline-stores/redshift.md) + * [Spark](reference/offline-stores/spark.md) * [Online stores](reference/online-stores/README.md) * [SQLite](reference/online-stores/sqlite.md) * [Redis](reference/online-stores/redis.md) * [Datastore](reference/online-stores/datastore.md) + * [DynamoDB](reference/online-stores/dynamodb.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) + * [Amazon Web Services](reference/providers/amazon-web-services.md) * [Feature repository](reference/feature-repository/README.md) * [feature\_store.yaml](reference/feature-repository/feature-store-yaml.md) * [.feastignore](reference/feature-repository/feast-ignore.md) +* [Feature servers](reference/feature-servers/README.md) + * [Python feature server](reference/feature-servers/python-feature-server.md) + * [Go-based feature retrieval](reference/feature-servers/go-feature-retrieval.md) +* [\[Alpha\] Data quality monitoring](reference/dqm.md) +* [\[Alpha\] On demand feature view](reference/alpha-on-demand-feature-view.md) +* [\[Alpha\] AWS Lambda feature server](reference/alpha-aws-lambda-feature-server.md) * [Feast CLI reference](reference/feast-cli-commands.md) -* [Python API reference](http://rtd.feast.dev/) +* [Python API reference](http://rtd.feast.dev) * [Usage](reference/usage.md) -## Feast on Kubernetes - -* [Getting started](feast-on-kubernetes/getting-started/README.md) - * [Install Feast](feast-on-kubernetes/getting-started/install-feast/README.md) - * [Docker Compose](feast-on-kubernetes/getting-started/install-feast/quickstart.md) - * [Kubernetes \(with Helm\)](feast-on-kubernetes/getting-started/install-feast/kubernetes-with-helm.md) - * [Amazon EKS \(with Terraform\)](feast-on-kubernetes/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md) - * [Azure AKS \(with Helm\)](feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-helm.md) - * [Azure AKS \(with Terraform\)](feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md) - * [Google Cloud GKE \(with 
Terraform\)](feast-on-kubernetes/getting-started/install-feast/google-cloud-gke-with-terraform.md) - * [IBM Cloud Kubernetes Service \(IKS\) and Red Hat OpenShift \(with Kustomize\)](feast-on-kubernetes/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md) - * [Connect to Feast](feast-on-kubernetes/getting-started/connect-to-feast/README.md) - * [Python SDK](feast-on-kubernetes/getting-started/connect-to-feast/python-sdk.md) - * [Feast CLI](feast-on-kubernetes/getting-started/connect-to-feast/feast-cli.md) - * [Learn Feast](feast-on-kubernetes/getting-started/learn-feast.md) -* [Concepts](feast-on-kubernetes/concepts/README.md) - * [Overview](feast-on-kubernetes/concepts/overview.md) - * [Architecture](feast-on-kubernetes/concepts/architecture.md) - * [Entities](feast-on-kubernetes/concepts/entities.md) - * [Sources](feast-on-kubernetes/concepts/sources.md) - * [Feature Tables](feast-on-kubernetes/concepts/feature-tables.md) - * [Stores](feast-on-kubernetes/concepts/stores.md) -* [Tutorials](feast-on-kubernetes/tutorials-1/README.md) - * [Minimal Ride Hailing Example](https://github.com/feast-dev/feast/blob/master/examples/minimal/minimal_ride_hailing.ipynb) -* [User guide](feast-on-kubernetes/user-guide/README.md) - * [Overview](feast-on-kubernetes/user-guide/overview.md) - * [Getting online features](feast-on-kubernetes/user-guide/getting-online-features.md) - * [Getting training features](feast-on-kubernetes/user-guide/getting-training-features.md) - * [Define and ingest features](feast-on-kubernetes/user-guide/define-and-ingest-features.md) - * [Extending Feast](feast-on-kubernetes/user-guide/extending-feast.md) -* [Reference](feast-on-kubernetes/reference-1/README.md) - * [Configuration Reference](feast-on-kubernetes/reference-1/configuration-reference.md) - * [Feast and Spark](feast-on-kubernetes/reference-1/feast-and-spark.md) - * [Metrics Reference](feast-on-kubernetes/reference-1/metrics-reference.md) - * [Limitations](feast-on-kubernetes/reference-1/limitations.md) - * [API Reference](feast-on-kubernetes/reference-1/api/README.md) - * [Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) - * [Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk) - * [Core gRPC API](https://api.docs.feast.dev/grpc/feast/core/coreservice.pb.html) - * [Python SDK](https://api.docs.feast.dev/python/) - * [Serving gRPC API](https://api.docs.feast.dev/grpc/feast/serving/servingservice.pb.html) - * [gRPC Types](https://api.docs.feast.dev/grpc/feast/types/value.pb) - * [Online Store Format Spec](specs/online_store_format.md) - * [Offline Store Format Spec](specs/offline_store_format.md) -* [Advanced](feast-on-kubernetes/advanced-1/README.md) - * [Troubleshooting](feast-on-kubernetes/advanced-1/troubleshooting.md) - * [Metrics](feast-on-kubernetes/advanced-1/metrics.md) - * [Audit Logging](feast-on-kubernetes/advanced-1/audit-logging.md) - * [Security](feast-on-kubernetes/advanced-1/security.md) - * [Upgrading Feast](feast-on-kubernetes/advanced-1/upgrading.md) - -## Contributing - -* [Contribution process](contributing/contributing.md) -* [Development guide](contributing/development-guide.md) -* [Versioning policy](contributing/versioning-policy.md) -* [Release process](contributing/release-process.md) +## Project +* [Contribution process](project/contributing.md) +* [Development guide](project/development-guide.md) +* [Versioning policy](project/versioning-policy.md) +* [Release process](project/release-process.md) +* [Feast 0.9 vs Feast 0.10+](project/feast-0.9-vs-feast-0.10+.md) diff 
--git a/docs/advanced/audit-logging.md b/docs/advanced/audit-logging.md deleted file mode 100644 index 1870a687bd..0000000000 --- a/docs/advanced/audit-logging.md +++ /dev/null @@ -1,132 +0,0 @@
-# Audit Logging
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-## Introduction
-
-Feast provides audit logging functionality in order to debug problems and to trace the lineage of events.
-
-## Audit Log Types
-
-Audit Logs produced by Feast come in three flavors:
-
-| Audit Log Type | Description |
-| :--- | :--- |
-| Message Audit Log | Logs service calls that can be used to track Feast request handling. Currently only gRPC request/response is supported. Enabling Message Audit Logs can be resource intensive and significantly increase latency, and is therefore not recommended on Online Serving. |
-| Transition Audit Log | Logs status transitions of resources managed by Feast \(ie an Ingestion Job becoming RUNNING\). |
-| Action Audit Log | Logs actions performed on a specific resource managed by Feast \(ie an Ingestion Job is aborted\). |
-
-## Configuration
-
-| Audit Log Type | Description |
-| :--- | :--- |
-| Message Audit Log | Enabled when both `feast.logging.audit.enabled` and `feast.logging.audit.messageLogging.enabled` are set to `true` |
-| Transition Audit Log | Enabled when `feast.logging.audit.enabled` is set to `true` |
-| Action Audit Log | Enabled when `feast.logging.audit.enabled` is set to `true` |
-
-## JSON Format
-
-Audit Logs produced by Feast are written to the console similar to normal logs but in structured, machine-parsable JSON. Example of a Message Audit Log JSON entry produced:
-
-```text
-{
-  "message": {
-    "logType": "FeastAuditLogEntry",
-    "kind": "MESSAGE",
-    "statusCode": "OK",
-    "request": {
-      "filter": {
-        "project": "dummy"
-      }
-    },
-    "application": "Feast",
-    "response": {},
-    "method": "ListFeatureTables",
-    "identity": "105960238928959148073",
-    "service": "CoreService",
-    "component": "feast-core",
-    "id": "45329ea9-0d48-46c5-b659-4604f6193711",
-    "version": "0.10.0-SNAPSHOT"
-  },
-  "hostname": "feast.core",
-  "timestamp": "2020-10-20T04:45:24Z",
-  "severity": "INFO"
-}
-```
-
-## Log Entry Schema
-
-Fields common to all Audit Log Types:
-
-| Field | Description |
-| :--- | :--- |
-| `logType` | Log Type. Always set to `FeastAuditLogEntry`. Useful for filtering out Feast audit logs. |
-| `application` | Application. Always set to `Feast`. |
-| `component` | Feast Component producing the Audit Log. Set to `feast-core` for Feast Core and `feast-serving` for Feast Serving. Used to filter Audit Logs by component. |
-| `version` | Version of Feast producing this Audit Log. Used to filter Audit Logs by version. |
-
-Fields in Message Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `id` | Generated UUID that uniquely identifies the service call. |
-| `service` | Name of the Service that handled the service call. |
-| `method` | Name of the Method that handled the service call. Useful for filtering Audit Logs by method \(ie `ApplyFeatureTable` calls\) |
-| `request` | Full request submitted by client in the service call as JSON. |
-| `response` | Full response returned to client by the service after handling the service call as JSON. |
-| `identity` | Identity of the client making the service call as a user Id. Only set when Authentication is enabled. |
-| `statusCode` | The status code returned by the service handling the service call \(ie `OK` if the service call was handled without error\). |
-
-Fields in Action Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `action` | Name of the action taken on the resource. |
-| `resource.type` | Type of resource on which the action was taken \(i.e `FeatureTable`\) |
-| `resource.id` | Identifier specifying the specific resource on which the action was taken. |
-
-Fields in Transition Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `status` | The new status that the resource transitioned to |
-| `resource.type` | Type of resource for which the transition occurred \(i.e `FeatureTable`\) |
-| `resource.id` | Identifier specifying the specific resource for which the transition occurred. |
-
-## Log Forwarder
-
-Feast currently only supports forwarding Request/Response \(Message Audit Log Type\) logs to an external Fluentd service with the `feast.**` Fluentd tag.
-
-### Request/Response Log Example
-
-```text
-{
-  "id": "45329ea9-0d48-46c5-b659-4604f6193711",
-  "service": "CoreService",
-  "status_code": "OK",
-  "identity": "105960238928959148073",
-  "method": "ListProjects",
-  "request": {},
-  "response": {
-    "projects": [
-      "default", "project1", "project2"
-    ]
-  },
-  "release_name": "506.457.14.512"
-}
-```
-
-### Configuration
-
-The Fluentd Log Forwarder is configured with the following configuration options in `application.yml`:
-
-| Settings | Description |
-| :--- | :--- |
-| `feast.logging.audit.messageLogging.destination` | `fluentd` |
-| `feast.logging.audit.messageLogging.fluentdHost` | `localhost` |
-| `feast.logging.audit.messageLogging.fluentdPort` | `24224` |
-
-When using Fluentd as the Log forwarder, a Feast `release_name` can be logged instead of the IP address \(e.g. the IP of a Kubernetes pod deployment\), by setting an environment variable `RELEASE_NAME` when deploying Feast.
-
diff --git a/docs/advanced/metrics.md b/docs/advanced/metrics.md deleted file mode 100644 index 5ea69f883f..0000000000 --- a/docs/advanced/metrics.md +++ /dev/null @@ -1,59 +0,0 @@
-# Metrics
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-## Overview
-
-Feast Components export metrics that can provide insight into Feast behavior:
-
-* [Feast Ingestion Jobs can be configured to push metrics into StatsD](metrics.md#pushing-ingestion-metrics-to-statsd)
-* [Prometheus can be configured to scrape metrics from Feast Core and Serving.](metrics.md#exporting-feast-metrics-to-prometheus)
-
-See the [Metrics Reference](../reference/metrics-reference.md) for documentation on the metrics exported by Feast.
-
-{% hint style="info" %}
-Feast Job Controller currently does not export any metrics on its own. However, its `application.yml` is used to configure metrics export for ingestion jobs.
-{% endhint %}
-
-## Pushing Ingestion Metrics to StatsD
-
-### **Feast Ingestion Job**
-
-Feast Ingestion Job can be configured to push Ingestion metrics to a StatsD instance. Metrics export to StatsD for Ingestion Job is configured in Job Controller's `application.yml` under `feast.jobs.metrics`
-
-```yaml
- feast:
-   jobs:
-     metrics:
-       # Enables StatsD metrics export if true.
-       enabled: true
-       type: statsd
-       # Host and port of the StatsD instance to export to.
-       host: localhost
-       port: 9125
-```
-
-{% hint style="info" %}
-If you need Ingestion Metrics in Prometheus or some other metrics backend, use a metrics forwarder to forward Ingestion Metrics from StatsD to the metrics backend of choice. \(i.e. use [`prometheus-statsd-exporter`](https://github.com/prometheus/statsd_exporter) to forward metrics to Prometheus\).
-{% endhint %}
-
-## Exporting Feast Metrics to Prometheus
-
-### **Feast Core and Serving**
-
-Feast Core and Serving export metrics to a Prometheus instance via Prometheus scraping their `/metrics` endpoints. Metrics export to Prometheus for Core and Serving can be configured via their corresponding `application.yml`
-
-```yaml
-server:
-  # Configures the port where metrics are exposed via /metrics for Prometheus to scrape.
-  port: 8081
-```
-
-[Configure Prometheus](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config) to scrape directly from Core and Serving's `/metrics` endpoint.
-
-## Further Reading
-
-See the [Metrics Reference](../reference/metrics-reference.md) for documentation on the metrics exported by Feast.
-
diff --git a/docs/advanced/security.md b/docs/advanced/security.md deleted file mode 100644 index 769260074f..0000000000 --- a/docs/advanced/security.md +++ /dev/null @@ -1,480 +0,0 @@
----
-description: 'Secure Feast with SSL/TLS, Authentication and Authorization.'
----
-
-# Security
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-### Overview
-
-![Overview of Feast's Security Methods.](../.gitbook/assets/untitled-25-1-%20%282%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%281%29%20%281%29.jpg)
-
-Feast supports the following security methods:
-
-* [SSL/TLS on messaging between Feast Core, Feast Online Serving and Feast SDKs.](security.md#2-ssl-tls)
-* [Authentication to Feast Core and Serving based on Open ID Connect ID tokens.](security.md#3-authentication)
-* [Authorization based on project membership and delegating authorization grants to external Authorization Server.](security.md#4-authorization)
-
-[Important considerations when integrating Authentication/Authorization](security.md#5-authentication-and-authorization).
-
-### **SSL/TLS**
-
-Feast supports SSL/TLS encrypted inter-service communication among Feast Core, Feast Online Serving, and Feast SDKs.
-
-#### Configuring SSL/TLS on Feast Core and Feast Serving
-
-The following properties configure SSL/TLS. These properties are located in their corresponding `application.yml` files:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `grpc.server.security.enabled` | Enables SSL/TLS functionality if `true` |
-| `grpc.server.security.certificateChain` | Provide the path to the certificate chain. |
-| `grpc.server.security.privateKey` | Provide the path to the private key. |
-
-> Read more on enabling SSL/TLS in the [gRPC starter docs](https://yidongnan.github.io/grpc-spring-boot-starter/en/server/security.html#enable-transport-layer-security).
-
-#### Configuring SSL/TLS on Python SDK/CLI
-
-To enable SSL/TLS in the [Feast Python SDK](https://api.docs.feast.dev/python/#feast.client.Client) or [Feast CLI](../getting-started/connect-to-feast/feast-cli.md), set the config options via `feast config`:
-
-| Configuration Option | Description |
-| :--- | :--- |
-| `core_enable_ssl` | Enables SSL/TLS functionality on connections to Feast core if `true` |
-| `serving_enable_ssl` | Enables SSL/TLS functionality on connections to Feast Online Serving if `true` |
-| `core_server_ssl_cert` | Optional. Specifies the path of the root certificate used to verify Core Service's identity. If omitted, uses system certificates. |
-| `serving_server_ssl_cert` | Optional. Specifies the path of the root certificate used to verify Serving Service's identity. If omitted, uses system certificates. |
-
-{% hint style="info" %}
-The Python SDK automatically uses SSL/TLS when connecting to Feast Core and Feast Online Serving via port 443.
-{% endhint %}
-
-#### Configuring SSL/TLS on Go SDK
-
-Configure SSL/TLS on the [Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) by passing configuration via `SecurityConfig`:
-
-```go
-cli, err := feast.NewSecureGrpcClient("localhost", 6566, feast.SecurityConfig{
-  EnableTLS: true,
-  TLSCertPath: "/path/to/cert.pem",
-})
-```
-
-| Config Option | Description |
-| :--- | :--- |
-| `EnableTLS` | Enables SSL/TLS functionality when connecting to Feast if `true` |
-| `TLSCertPath` | Optional. Provides the path of the root certificate used to verify Feast Service's identity. If omitted, uses system certificates. |
-
-#### Configuring SSL/TLS on **Java** SDK
-
-Configure SSL/TLS on the [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk) by passing configuration via `SecurityConfig`:
-
-```java
-FeastClient client = FeastClient.createSecure("localhost", 6566,
-  SecurityConfig.newBuilder()
-    .setTLSEnabled(true)
-    .setCertificatePath(Optional.of("/path/to/cert.pem"))
-    .build());
-```
-
-| Config Option | Description |
-| :--- | :--- |
-| `setTLSEnabled()` | Enables SSL/TLS functionality when connecting to Feast if `true` |
-| `setCertificatesPath()` | Optional. Set the path of the root certificate used to verify Feast Service's identity. If omitted, uses system certificates. |
-
-### **Authentication**
-
-{% hint style="warning" %}
-To prevent man-in-the-middle attacks, we recommend that SSL/TLS be implemented prior to authentication.
-{% endhint %}
-
-Authentication can be implemented to identify and validate client requests to Feast Core and Feast Online Serving. Currently, Feast uses [Open ID Connect \(OIDC\)](https://auth0.com/docs/protocols/openid-connect-protocol) ID tokens \(i.e. [Google Open ID Connect](https://developers.google.com/identity/protocols/oauth2/openid-connect)\) to authenticate client requests.
-
-#### Configuring Authentication in Feast Core and Feast Online Serving
-
-Authentication can be configured for Feast Core and Feast Online Serving via properties in their corresponding `application.yml` files:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.security.authentication.enabled` | Enables Authentication functionality if `true` |
-| `feast.security.authentication.provider` | Authentication Provider type. Currently only supports `jwt` |
-| `feast.security.authentication.option.jwkEndpointURI` | HTTPS URL used by Feast to retrieve the [JWK](https://tools.ietf.org/html/rfc7517) used to verify OIDC ID tokens. |
-
-{% hint style="info" %}
-`jwkEndpointURI` is set to retrieve Google's OIDC JWK by default, allowing OIDC ID tokens issued by Google to be used for authentication.
-{% endhint %}
-
-Behind the scenes, Feast Core and Feast Online Serving authenticate by:
-
-* Extracting the OIDC ID token `TOKEN` from gRPC metadata submitted with the request:
-
-```text
-('authorization', 'Bearer: TOKEN')
-```
-
-* Validating the token's authenticity using the JWK retrieved from the `jwkEndpointURI`
-
-#### **Authenticating Serving with Feast Core**
-
-Feast Online Serving communicates with Feast Core during normal operation. When both authentication and authorization are enabled on Feast Core, Feast Online Serving is forced to authenticate its requests to Feast Core. Otherwise, Feast Online Serving produces an Authentication failure error when connecting to Feast Core.
-
-Properties used to configure Serving authentication via `application.yml`:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.core-authentication.enabled` | Requires Feast Online Serving to authenticate when communicating with Feast Core. |
-| `feast.core-authentication.provider` | Selects the provider Feast Online Serving uses to retrieve credentials, which are then used to authenticate requests to Feast Core. Valid providers are `google` and `oauth`. |
-
-{% tabs %}
-{% tab title="Google Provider" %}
-Google Provider automatically extracts the credential from the credential JSON file.
-
-* Set the [`GOOGLE_APPLICATION_CREDENTIALS` environment variable](https://cloud.google.com/docs/authentication/getting-started#setting_the_environment_variable) to the path of the credential in the JSON file.
-{% endtab %}
-
-{% tab title="OAuth Provider" %}
-OAuth Provider makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential. OAuth requires the following options to be set at `feast.security.core-authentication.options.`:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `oauth_url` | Target URL receiving the client-credentials request. |
-| `grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `client_id` | Client Id used in the client-credentials request. |
-| `client_secret` | Client secret used in the client-credentials request. |
-| `audience` | Target audience of the credential. Set to the host URL of Feast Core \(i.e. `https://localhost` if Feast Core listens on localhost\). |
-| `jwkEndpointURI` | HTTPS URL used to retrieve a JWK that can be used to decode the credential. |
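
Putting the options together, Serving's `application.yml` might look roughly like the following minimal sketch. The endpoint URLs and client credentials are placeholder values, and the exact property layout should be verified against your Feast version:

```yaml
feast:
  core-authentication:
    enabled: true
    provider: oauth
  security:
    core-authentication:
      options:
        oauth_url: https://oauth.endpoint/auth    # placeholder OAuth token endpoint
        grant_type: client_credentials
        client_id: my_client_id                   # placeholder client id
        client_secret: my_client_secret           # placeholder client secret
        audience: https://localhost               # host URL of Feast Core
        jwkEndpointURI: https://jwk.endpoint/jwk  # placeholder JWK endpoint
```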
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Python SDK/CLI**
-
-Configure the [Feast Python SDK](https://api.docs.feast.dev/python/) and [Feast CLI](../getting-started/connect-to-feast/feast-cli.md) to use authentication via `feast config`:
-
-```bash
-$ feast config set enable_auth true
-```
-
-| Configuration Option | Description |
-| :--- | :--- |
-| `enable_auth` | Enables authentication functionality if set to `true`. |
-| `auth_provider` | Use an authentication provider to obtain a credential for authentication. Currently supports `google` and `oauth`. |
-| `auth_token` | Manually specify a static token for use in authentication. Overrules `auth_provider` if both are set. |
-
-{% tabs %}
-{% tab title="Google Provider" %}
-Google Provider automatically finds and uses Google Credentials to authenticate requests:
-
-* Google Provider automatically uses established credentials for authenticating requests if you are already authenticated with the `gcloud` CLI via:
-
-```text
-$ gcloud auth application-default login
-```
-
-* Alternatively, Google Provider can be configured to use the credentials in a JSON file via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud Authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\):
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-{% endtab %}
-
-{% tab title="OAuth Provider" %}
-OAuth Provider makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests. The OAuth provider requires the following config options to be set via `feast config`:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `oauth_token_request_url` | Target URL receiving the client-credentials request. |
-| `oauth_grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `oauth_client_id` | Client Id used in the client-credentials request. |
-| `oauth_client_secret` | Client secret used in the client-credentials request. |
-| `oauth_audience` | Target audience of the credential. Set to the host URL of the target Service \(i.e. `https://localhost` if the Service listens on localhost\). |
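
For illustration, the OAuth provider could be configured from the CLI as in the following sketch, using the `feast config set` command shown above. The URL, client id, and secret are placeholder values:

```bash
$ feast config set enable_auth true
$ feast config set auth_provider oauth
$ feast config set oauth_grant_type client_credentials
$ feast config set oauth_client_id my_client_id
$ feast config set oauth_client_secret my_client_secret
$ feast config set oauth_audience https://localhost
$ feast config set oauth_token_request_url https://oauth.endpoint/auth
```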
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Go SDK**
-
-Configure the [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) to use authentication by specifying the credential via `SecurityConfig`:
-
-```go
-// error handling omitted.
-// Use Google Credential as provider.
-cred, _ := feast.NewGoogleCredential("localhost:6566")
-cli, _ := feast.NewSecureGrpcClient("localhost", 6566, feast.SecurityConfig{
-  // Specify the credential to provide tokens for Feast Authentication.
-  Credential: cred,
-})
-```
-
-{% tabs %}
-{% tab title="Google Credential" %}
-Google Credential uses the Service Account credentials JSON file set via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud Authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\) to obtain tokens for authenticating Feast requests:
-
-* Exporting `GOOGLE_APPLICATION_CREDENTIALS`
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-
-* Create a Google Credential with the target audience.
-
-```go
-cred, _ := feast.NewGoogleCredential("localhost:6566")
-```
-
-> Target audience of the credential should be set to the host URL of the target Service \(ie `https://localhost` if the Service listens on `localhost`\):
-{% endtab %}
-
-{% tab title="OAuth Credential" %}
-OAuth Credential makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests:
-
-* Create OAuth Credential with parameters:
-
-```go
-cred := feast.NewOAuthCredential("localhost:6566", "client_id", "secret", "https://oauth.endpoint/auth")
-```
-
-| Parameter | Description |
-| :--- | :--- |
-| `audience` | Target audience of the credential. Set to the host URL of the target Service \(i.e. `https://localhost` if the Service listens on localhost\). |
-| `clientId` | Client Id used in the client-credentials request. |
-| `clientSecret` | Client secret used in the client-credentials request. |
-| `endpointURL` | Target URL to make the client-credentials request to. |
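
The OAuth credential is then wired into the secure client the same way as the Google credential above; a minimal sketch combining the two calls already shown, where the host, port, and OAuth parameters are placeholder values:

```go
// error handling omitted.
// Create an OAuth credential and pass it to the secure gRPC client.
cred := feast.NewOAuthCredential("localhost:6566", "client_id", "secret", "https://oauth.endpoint/auth")
cli, _ := feast.NewSecureGrpcClient("localhost", 6566, feast.SecurityConfig{
  EnableTLS: true,
  // Specify the credential to provide tokens for Feast Authentication.
  Credential: cred,
})
```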
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Java SDK**
-
-Configure the [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) to use authentication by setting credentials via `SecurityConfig`:
-
-```java
-// Use GoogleAuthCredentials as provider.
-CallCredentials credentials = new GoogleAuthCredentials(
-    Map.of("audience", "localhost:6566"));
-
-FeastClient client = FeastClient.createSecure("localhost", 6566,
-    SecurityConfig.newBuilder()
-        // Specify the credentials to provide tokens for Feast Authentication.
-        .setCredentials(Optional.of(credentials))
-        .build());
-```
-
-{% tabs %}
-{% tab title="GoogleAuthCredentials" %}
-GoogleAuthCredentials uses the Service Account credentials JSON file set via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\) to obtain tokens for authenticating Feast requests:
-
-* Exporting `GOOGLE_APPLICATION_CREDENTIALS`
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-
-* Create a Google Credential with the target audience.
-
-```java
-CallCredentials credentials = new GoogleAuthCredentials(
-    Map.of("audience", "localhost:6566"));
-```
-
-> Target audience of the credentials should be set to the host URL of the target Service \(ie `https://localhost` if the Service listens on `localhost`\):
-{% endtab %}
-
-{% tab title="OAuthCredentials" %}
-OAuthCredentials makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests:
-
-* Create OAuthCredentials with parameters:
-
-```java
-CallCredentials credentials = new OAuthCredentials(Map.of(
-    "audience", "localhost:6566",
-    "grant_type", "client_credentials",
-    "client_id", "some_id",
-    "client_secret", "secret",
-    "oauth_url", "https://oauth.endpoint/auth",
-    "jwkEndpointURI", "https://jwk.endpoint/jwk"));
-```
-
-| Parameter | Description |
-| :--- | :--- |
-| `audience` | Target audience of the credential. Set to the host URL of the target Service \(i.e. `https://localhost` if the Service listens on localhost\). |
-| `grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `client_id` | Client Id used in the client-credentials request. |
-| `client_secret` | Client secret used in the client-credentials request. |
-| `oauth_url` | Target URL to make the client-credentials request to obtain the credential. |
-| `jwkEndpointURI` | HTTPS URL used to retrieve a JWK that can be used to decode the credential. |
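
As with the Google credentials above, the OAuth credentials are then passed to `FeastClient.createSecure`; a minimal sketch reusing the `credentials` object created in the previous block \(host and port are placeholder values\):

```java
// Pass the OAuthCredentials to the secure client.
FeastClient client = FeastClient.createSecure("localhost", 6566,
    SecurityConfig.newBuilder()
        .setTLSEnabled(true)
        // Provide tokens for Feast Authentication via OAuthCredentials.
        .setCredentials(Optional.of(credentials))
        .build());
```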
-{% endtab %}
-{% endtabs %}
-
-### Authorization
-
-{% hint style="info" %}
-Authorization requires that authentication be configured to obtain a user identity for use in authorizing requests.
-{% endhint %}
-
-Authorization provides access control to FeatureTables and/or Features based on project membership. Users who are members of a project are authorized to:
-
-* Create and/or Update a Feature Table in the Project.
-* Retrieve Feature Values for Features in that Project.
-
-#### **Authorization API/Server**
-
-![Feast Authorization Flow](../.gitbook/assets/rsz_untitled23%20%282%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29.jpg)
-
-Feast delegates Authorization grants to an external Authorization Server that implements the [Authorization Open API specification](https://github.com/feast-dev/feast/blob/master/common/src/main/resources/api.yaml).
-
-* Feast checks whether a user is authorized to make a request by making a `checkAccessRequest` to the Authorization Server.
-* The Authorization Server should return an `AuthorizationResult` indicating whether the user is allowed to make the request.
-
-Authorization can be configured for Feast Core and Feast Online Serving via properties in their corresponding `application.yml`:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.security.authorization.enabled` | Enables authorization functionality if `true`. |
-| `feast.security.authorization.provider` | Authorization Provider type. Currently only supports `http` |
-| `feast.security.authorization.option.authorizationUrl` | URL endpoint of the Authorization Server to make check access requests to. |
-| `feast.security.authorization.option.subjectClaim` | Optional. Name of the claim to extract from the ID Token to include in the check access request as Subject. |
-
-{% hint style="info" %}
-This [Authorization Server with Keto](https://github.com/feast-dev/feast-keto-auth-server) can be used as a reference implementation of an Authorization Server that Feast supports.
-{% endhint %}
-
-### **Authentication & Authorization**
-
-When using Authentication & Authorization, consider:
-
-* Enabling Authentication without Authorization makes authentication **optional**. You can still send unauthenticated requests.
-* Enabling Authorization forces all requests to be authenticated. Requests that are not authenticated are **dropped**.
-
diff --git a/docs/advanced/troubleshooting.md b/docs/advanced/troubleshooting.md deleted file mode 100644 index 1060466d30..0000000000 --- a/docs/advanced/troubleshooting.md +++ /dev/null @@ -1,136 +0,0 @@
-# Troubleshooting
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-If at any point in time you cannot resolve a problem, please see the [Community](../community.md) section for reaching out to the Feast community.
-
-### How can I verify that all services are operational?
-
-#### Docker Compose
-
-The containers should be in an `up` state:
-
-```text
-docker ps
-```
-
-#### Google Kubernetes Engine
-
-All services should either be in a `RUNNING` state or `COMPLETED` state:
-
-```text
-kubectl get pods
-```
-
-### How can I verify that I can connect to all services?
-
-First, locate the host and port of the Feast Services.
-
-#### **Docker Compose \(from inside the Docker network\)**
-
-You will probably need to connect using the hostnames of services and standard Feast ports:
-
-```bash
-export FEAST_CORE_URL=core:6565
-export FEAST_ONLINE_SERVING_URL=online_serving:6566
-export FEAST_HISTORICAL_SERVING_URL=historical_serving:6567
-export FEAST_JOBCONTROLLER_URL=jobcontroller:6570
-```
-
-#### **Docker Compose \(from outside the Docker network\)**
-
-You will probably need to connect using `localhost` and standard ports:
-
-```bash
-export FEAST_CORE_URL=localhost:6565
-export FEAST_ONLINE_SERVING_URL=localhost:6566
-export FEAST_HISTORICAL_SERVING_URL=localhost:6567
-export FEAST_JOBCONTROLLER_URL=localhost:6570
-```
-
-#### **Google Kubernetes Engine \(GKE\)**
-
-You will need to find the external IP of one of the nodes as well as the NodePorts. Please make sure that your firewall is open for these ports:
-
-```bash
-export FEAST_IP=$(kubectl describe nodes | grep ExternalIP | awk '{print $2}' | head -n 1)
-export FEAST_CORE_URL=${FEAST_IP}:32090
-export FEAST_ONLINE_SERVING_URL=${FEAST_IP}:32091
-export FEAST_HISTORICAL_SERVING_URL=${FEAST_IP}:32092
-```
-
-`netcat`, `telnet`, or even `curl` can be used to test whether all services are available and ports are open, but `grpc_cli` is the most powerful. It can be installed from [here](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md).
-
-#### Testing Connectivity From Feast Services
-
-Use `grpc_cli` to test connectivity by listing the gRPC methods exposed by Feast services:
-
-```bash
-grpc_cli ls ${FEAST_CORE_URL} feast.core.CoreService
-```
-
-```bash
-grpc_cli ls ${FEAST_JOBCONTROLLER_URL} feast.core.JobControllerService
-```
-
-```bash
-grpc_cli ls ${FEAST_HISTORICAL_SERVING_URL} feast.serving.ServingService
-```
-
-```bash
-grpc_cli ls ${FEAST_ONLINE_SERVING_URL} feast.serving.ServingService
-```
-
-### How can I print logs from the Feast Services?
-
-Feast will typically have four services that you need to monitor if something goes wrong:
-
-* Feast Core
-* Feast Job Controller
-* Feast Serving \(Online\)
-* Feast Serving \(Batch\)
-
-In order to print the logs from these services, please run the commands below.
-
-#### Docker Compose
-
-Use `docker logs` to obtain Feast component logs:
-
-```text
-docker logs -f feast_core_1
-```
-
-```text
-docker logs -f feast_jobcontroller_1
-```
-
-```text
-docker logs -f feast_historical_serving_1
-```
-
-```text
-docker logs -f feast_online_serving_1
-```
-
-#### Google Kubernetes Engine
-
-Use `kubectl logs` to obtain Feast component logs:
-
-```text
-kubectl logs $(kubectl get pods | grep feast-core | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-jobcontroller | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-serving-batch | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-serving-online | awk '{print $1}')
-```
-
diff --git a/docs/advanced/upgrading.md b/docs/advanced/upgrading.md deleted file mode 100644 index 3c7b95d544..0000000000 --- a/docs/advanced/upgrading.md +++ /dev/null @@ -1,113 +0,0 @@
-# Upgrading Feast
-
-### Migration from v0.6 to v0.7
-
-#### Feast Core Validation changes
-
-In v0.7, Feast Core no longer accepts names that start with a number \(0-9\) or contain dashes for:
-
-* Project
-* Feature Set
-* Entities
-* Features
-
-Migrate all project, feature set, entity, and feature names:
-
-* Recreate any names containing '-' with the '-' replaced by '\_'.
-* Recreate any names that have a number \(0-9\) as the first letter so that they do not.
-
-Feast now prevents feature sets from being applied if no store is subscribed to that Feature Set.
-
-* Ensure that a store is configured to subscribe to the Feature Set before applying the Feature Set.
-
-#### Feast Core's Job Coordinator is now Feast Job Controller
-
-In v0.7, Feast Core's Job Coordinator has been decoupled from Feast Core and runs as a separate Feast Job Controller application. See its [Configuration reference](../reference/configuration-reference.md#2-feast-core-serving-and-job-controller) for how to configure Feast Job Controller.
-
-**Ingestion Job API**
-
-In v0.7, the following changes were made to the Ingestion Job API:
-
-* Changed the List Ingestion Job API to return a list of `FeatureSetReference` instead of a list of FeatureSet in the response.
-* Moved `ListIngestionJobs`, `StopIngestionJob`, `RestartIngestionJob` calls from `CoreService` to `JobControllerService`.
-* Python SDK/CLI: Added a new [Job Controller client](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/contrib/job_controller/client.py) and `jobcontroller_url` config option.
-
-Users of the Ingestion Job API via gRPC should migrate by:
-
-* Adding a new client to connect to the Job Controller endpoint to call `JobControllerService`, and calling `ListIngestionJobs`, `StopIngestionJob`, `RestartIngestionJob` from the new client.
-* Migrating code to accept feature references instead of feature sets returned in the `ListIngestionJobs` response.
-
-Users of Ingestion Job via Python SDK \(ie `feast ingest-jobs list` or `client.stop_ingest_job()` etc.\) should migrate by:
-
-* `ingest_job()` methods only: Create a new separate [Job Controller client](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/contrib/job_controller/client.py) to connect to the job controller and call `ingest_job()` methods using the new client.
-* Configure the Feast Job Controller endpoint url via the `jobcontroller_url` config option \(see the sketch below\).
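
For illustration, the endpoint could be set from the CLI as follows. This is a sketch: the host and port are placeholders, matching the default Job Controller port used in the troubleshooting examples above:

```bash
$ feast config set jobcontroller_url localhost:6570
```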
-
-#### Configuration Properties Changes
-
-* Rename the `feast.jobs.consolidate-jobs-per-source` property to `feast.jobs.controller.consolidate-jobs-per-sources`
-* Rename `feast.security.authorization.options.subjectClaim` to `feast.security.authentication.options.subjectClaim`
-* Rename `feast.logging.audit.messageLoggingEnabled` to `feast.audit.messageLogging.enabled`
-
-### Migration from v0.5 to v0.6
-
-#### Database schema
-
-In Release 0.6 we introduced [Flyway](https://flywaydb.org/) to handle schema migrations in PostgreSQL. Flyway is integrated into `core`, and from now on all migrations will be run automatically on `core` start. It uses the table `flyway_schema_history` in the same database \(also created automatically\) to keep track of already applied migrations, so no specific maintenance should be needed.
-
-If you already have an existing deployment of Feast 0.5, Flyway will detect the existing tables and omit the first baseline migration.
-
-After `core` has started, `flyway_schema_history` should look like this:
-
-```text
->> select version, description, script, checksum from flyway_schema_history
-
-version | description                             | script                                  | checksum
---------+-----------------------------------------+-----------------------------------------+------------
- 1      | << Flyway Baseline >>                   | << Flyway Baseline >>                   |
- 2      | RELEASE 0.6 Generalizing Source AND ... | V2__RELEASE_0.6_Generalizing_Source_... | 1537500232
-```
-
-This release makes the following major schema changes:
-
-* Source is not shared between FeatureSets anymore. It has changed to a 1:1 relation, and the source's primary key is now an auto-incremented number.
-* Due to the generalization of Source, the `sources.topics` & `sources.bootstrap_servers` columns were deprecated. They will be replaced with `sources.config`. Data migration is handled in code when the respective Source is used. `topics` and `bootstrap_servers` will be deleted in the next release.
-* Job \(table `jobs`\) is no longer connected to `Source` \(table `sources`\) since it uses a consolidated source for optimization purposes. All data required by a Job is embedded in its table.
-
-New Models \(tables\):
-
-* feature\_statistics
-
-Minor changes:
-
-* FeatureSet has a new column `version` \(see [proto](https://github.com/feast-dev/feast/blob/master/protos/feast/core/FeatureSet.proto) for details\)
-* The connecting table `jobs_feature_sets` \(many-to-many relation between jobs & feature sets\) now has `version` and `delivery_status` columns.
-
-### Migration from v0.4 to v0.6
-
-#### Database
-
-For all versions earlier than 0.5, seamless migration is not feasible due to earlier breaking changes, and creating a new database will be required.
-
-Since the database will be empty, the first \(baseline\) migration will be applied:
-
-```text
->> select version, description, script, checksum from flyway_schema_history
-
-version | description                             | script                                  | checksum
---------+-----------------------------------------+-----------------------------------------+------------
- 1      | Baseline                                | V1__Baseline.sql                        | 1091472110
- 2      | RELEASE 0.6 Generalizing Source AND ... | V2__RELEASE_0.6_Generalizing_Source_...
| 1537500232 -``` - diff --git a/docs/architecture.md b/docs/architecture.md deleted file mode 100644 index a2dc5cd6a8..0000000000 --- a/docs/architecture.md +++ /dev/null @@ -1,2 +0,0 @@ -# Architecture - diff --git a/docs/architecture.png b/docs/architecture.png deleted file mode 100644 index 6d56a62360..0000000000 Binary files a/docs/architecture.png and /dev/null differ diff --git a/docs/assets/arch.png b/docs/assets/arch.png deleted file mode 100644 index bc655b60f3..0000000000 Binary files a/docs/assets/arch.png and /dev/null differ diff --git a/docs/assets/feast-components-overview.png b/docs/assets/feast-components-overview.png deleted file mode 100644 index 1f69bb7ed8..0000000000 Binary files a/docs/assets/feast-components-overview.png and /dev/null differ diff --git a/docs/assets/feast-marchitecture.png b/docs/assets/feast-marchitecture.png new file mode 100644 index 0000000000..0a7b044b09 Binary files /dev/null and b/docs/assets/feast-marchitecture.png differ diff --git a/docs/build-a-training-dataset.md b/docs/build-a-training-dataset.md deleted file mode 100644 index eff44fdf9c..0000000000 --- a/docs/build-a-training-dataset.md +++ /dev/null @@ -1,2 +0,0 @@ -# Build a training dataset - diff --git a/docs/community.md b/docs/community.md index 0cf210e59b..c0ead3dda1 100644 --- a/docs/community.md +++ b/docs/community.md @@ -1,7 +1,7 @@ # Community {% hint style="success" %} -**Office Hours:** Have a question, feature request, idea, or just looking to speak to a real person? Come and join the [Feast Office Hours](https://calendly.com/d/gc29-y88c/chat-with-feast-maintainers) on Friday and chat with a Feast contributor! +**Speak to us:** Have a question, feature request, idea, or just looking to speak to a real person? Set up a meeting with a Feast maintainer over [here](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team)! {% endhint %} ## Links & Resources @@ -10,8 +10,10 @@ * [Mailing list](https://groups.google.com/d/forum/feast-dev): We have both a user and developer mailing list. * Feast users should join [feast-discuss@googlegroups.com](mailto:feast-discuss@googlegroups.com) group by clicking [here](https://groups.google.com/g/feast-discuss). * Feast developers should join [feast-dev@googlegroups.com](mailto:feast-dev@googlegroups.com) group by clicking [here](https://groups.google.com/d/forum/feast-dev). + * People interested in the Feast community newsletter should join feast-announce by clicking [here](https://groups.google.com/d/forum/feast-announce). +* [Community Calendar](https://calendar.google.com/calendar/u/0?cid=ZTFsZHVhdGM3MDU3YTJucTBwMzNqNW5rajBAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ): Includes community calls and design meetings. * [Google Folder](https://drive.google.com/drive/u/0/folders/1jgMHOPDT2DvBlJeO9LCM79DP4lm4eOrR): This folder is used as a central repository for all Feast resources. For example: - * Design proposals in the form of Request for Comments \(RFC\). + * Design proposals in the form of Request for Comments (RFC). * User surveys and meeting minutes. * Slide decks of conferences our contributors have spoken at. * [Feast GitHub Repository](https://github.com/feast-dev/feast/): Find the complete Feast codebase on GitHub. @@ -19,25 +21,23 @@ ## How can I get help? -* **Slack:** Need to speak to a human? Come ask a question in our Slack channel \(link above\). +* **Slack:** Need to speak to a human? Come ask a question in our Slack channel (link above). * **GitHub Issues:** Found a bug or need a feature? 
[Create an issue on GitHub](https://github.com/feast-dev/feast/issues/new).
 * **StackOverflow:** Need to ask a question on how to use Feast? We also monitor and respond to [StackOverflow](https://stackoverflow.com/questions/tagged/feast).
 
 ## Community Calls
 
-We have a user and contributor community call every two weeks \(Asia & US friendly\).
+We have a user and contributor community call every two weeks (Asia & US friendly).
 
 {% hint style="info" %}
 Please join the above Feast user groups in order to see calendar invites to the community calls
 {% endhint %}
 
-### Frequency \(alternating times every 2 weeks\)
+### Frequency (every 2 weeks)
 
-* Tuesday 18:00 pm to 18:30 pm \(US, Asia\)
-* Tuesday 10:00 am to 10:30 am \(US, Europe\)
+* Tuesday 10:00 am to 10:30 am PST
 
 ### Links
 
 * Zoom: [https://zoom.us/j/6325193230](https://zoom.us/j/6325193230)
-* Meeting notes: [https://bit.ly/feast-notes](https://bit.ly/feast-notes%20)
-
+* Meeting notes (incl recordings): [https://bit.ly/feast-notes](https://bit.ly/feast-notes)
diff --git a/docs/concepts/architecture.md b/docs/concepts/architecture.md
deleted file mode 100644
index 568ac1aa7d..0000000000
--- a/docs/concepts/architecture.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Architecture
-
-![Feast 0.10 Architecture Diagram](../.gitbook/assets/image%20%284%29.png)
-
-### Functionality
-
-* **Create Batch Features:** ELT/ETL systems like Spark and SQL are used to transform data in the batch store.
-* **Feast Apply:** The user \(or CI\) publishes version-controlled feature definitions using `feast apply`. This CLI command updates infrastructure and persists definitions in the object store registry.
-* **Feast Materialize:** The user \(or scheduler\) executes `feast materialize`, which loads features from the offline store into the online store.
-* **Model Training:** A model training pipeline is launched. It uses the Feast Python SDK to retrieve a training dataset and trains a model.
-* **Get Historical Features:** Feast exports a point-in-time correct training dataset based on the list of features and entity dataframe provided by the model training pipeline.
-* **Deploy Model:** The trained model binary \(and list of features\) is deployed into a model serving system. This step is not executed by Feast.
-* **Prediction:** A backend system makes a request for a prediction from the model serving service.
-* **Get Online Features:** The model serving service makes a request to the Feast Online Serving service for online features using a Feast SDK.
-
-### Components
-
-A complete Feast deployment contains the following components:
-
-* **Feast Online Serving:** Provides low-latency access to feature values stored in the online store. This component is optional. Teams can also read feature values directly from the online store if necessary.
-* **Feast Registry**: An object store \(GCS, S3\) based registry used to persist feature definitions that are registered with the feature store. Systems can discover feature data by interacting with the registry through the Feast SDK.
-* **Feast Python SDK/CLI:** The primary user facing SDK. Used to:
-  * Manage version-controlled feature definitions.
-  * Materialize \(load\) feature values into the online store.
-  * Build and retrieve training datasets from the offline store.
-  * Retrieve online features.
-* **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is populated by materialization jobs.
-* **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. Feast does not manage the offline store directly, but runs queries against it.
-
-{% hint style="info" %}
-Java and Go Clients are also available for online feature retrieval. See [API Reference](../feast-on-kubernetes/reference-1/api/).
-{% endhint %}
-
diff --git a/docs/concepts/entities.md b/docs/concepts/entities.md
deleted file mode 100644
index dadeac1cac..0000000000
--- a/docs/concepts/entities.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Entities
-
diff --git a/docs/concepts/feature-tables.md b/docs/concepts/feature-tables.md
deleted file mode 100644
index a27e0c000b..0000000000
--- a/docs/concepts/feature-tables.md
+++ /dev/null
@@ -1,122 +0,0 @@
-# Feature Tables
-
-## Overview
-
-Feature tables are both a schema and a logical means of grouping features, data [sources](sources.md), and other related metadata.
-
-Feature tables serve the following purposes:
-
-* Feature tables are a means for defining the location and properties of data [sources](sources.md).
-* Feature tables are used to create within Feast a database-level structure for the storage of feature values.
-* The data sources described within feature tables allow Feast to find and ingest feature data into stores within Feast.
-* Feature tables ensure data is efficiently stored during [ingestion](../user-guide/define-and-ingest-features.md) by providing a grouping mechanism of feature values that occur on the same event timestamp.
-
-{% hint style="info" %}
-Feast does not yet apply feature transformations. Transformations are currently expected to happen before data is ingested into Feast. The data sources described within feature tables should reference feature values in their already transformed form.
-{% endhint %}
-
-### Features
-
-A feature is an individual measurable property observed on an entity. For example, the number of transactions \(feature\) a customer \(entity\) has completed. Features are used for both model training and scoring \(batch, online\).
-
-Features are defined as part of feature tables. Since Feast does not apply transformations, a feature is basically a schema that only contains a name and a type:
-
-```python
-avg_daily_ride = Feature("average_daily_rides", ValueType.FLOAT)
-```
-
-Visit [FeatureSpec](https://api.docs.feast.dev/grpc/feast.core.pb.html#FeatureSpecV2) for the complete feature specification API.
-
-## Structure of a Feature Table
-
-Feature tables contain the following fields:
-
-* **Name:** Name of the feature table. This name must be unique within a project.
-* **Entities:** List of [entities](entities.md) to associate with the features defined in this feature table. Entities are used as lookup keys when retrieving features from a feature table.
-* **Features:** List of features within a feature table.
-* **Labels:** Labels are arbitrary key-value properties that can be defined by users.
-* **Max age:** Max age affects the retrieval of features from a feature table. Age is measured as the duration of time between the event timestamp of a feature and the lookup time on an [entity key](glossary.md#entity-key) used to retrieve the feature. Feature values outside max age will be returned as unset values. Max age allows for eviction of keys from online stores and limits the amount of historical scanning required for historical feature values during retrieval.
-* **Batch Source:** The batch data source from which Feast will ingest feature values into stores. This can either be used to back-fill stores before switching over to a streaming source, or it can be used as the primary source of data for a feature table. Visit [Sources](sources.md) to learn more about batch sources.
-* **Stream Source:** The streaming data source from which you can ingest streaming feature values into Feast. Streaming sources must be paired with a batch source containing the same feature values. A streaming source is only used to populate online stores. The batch equivalent source that is paired with a streaming source is used during the generation of historical feature datasets. Visit [Sources](sources.md) to learn more about stream sources.
-
-Here is a ride-hailing example of a valid feature table specification:
-
-{% tabs %}
-{% tab title="driver\_trips\_feature\_table.py" %}
-```python
-from feast import BigQuerySource, FeatureTable, Feature, ValueType
-from google.protobuf.duration_pb2 import Duration
-
-driver_ft = FeatureTable(
-    name="driver_trips",
-    entities=["driver_id"],
-    features=[
-        Feature("average_daily_rides", ValueType.FLOAT),
-        Feature("rating", ValueType.FLOAT)
-    ],
-    max_age=Duration(seconds=3600),
-    labels={
-        "team": "driver_matching"
-    },
-    batch_source=BigQuerySource(
-        table_ref="gcp_project:bq_dataset.bq_table",
-        event_timestamp_column="datetime",
-        created_timestamp_column="timestamp",
-        field_mapping={
-            "rating": "driver_rating"
-        }
-    )
-)
-```
-{% endtab %}
-{% endtabs %}
-
-By default, Feast assumes that features specified in the feature-table specification correspond one-to-one to the fields found in the sources. All features defined in a feature table should be available in the defined sources.
-
-Field mappings can be used to map features defined in Feast to fields as they occur in data sources.
-
-In the example feature table specification above, we use field mappings to ensure the feature named `rating` in the batch source is mapped to the field named `driver_rating`.
-
-## Working with a Feature Table
-
-#### Creating a Feature Table
-
-```python
-driver_ft = FeatureTable(...)
-client.apply(driver_ft)
-```
-
-#### Updating a Feature Table
-
-```python
-driver_ft = FeatureTable(...)
-
-client.apply(driver_ft)
-
-driver_ft.labels = {"team": "marketplace"}
-
-client.apply(driver_ft)
-```
-
-#### Feast currently supports the following changes to feature tables:
-
-* Adding new features.
-* Removing features.
-* Updating source, max age, and labels.
-
-{% hint style="warning" %}
-Deleted features are archived, rather than removed completely. Importantly, new features cannot use the names of these deleted features.
-{% endhint %}
-
-#### Feast currently does not support the following changes to feature tables:
-
-* Changes to the project or name of a feature table.
-* Changes to entities related to a feature table.
-* Changes to names and types of existing features.
-
-#### Deleting a Feature Table
-
-{% hint style="danger" %}
-Feast currently does not support the deletion of feature tables.
-{% endhint %}
-
diff --git a/docs/concepts/feature-view.md b/docs/concepts/feature-view.md
deleted file mode 100644
index 8326723fd6..0000000000
--- a/docs/concepts/feature-view.md
+++ /dev/null
@@ -1,71 +0,0 @@
-# Feature view
-
-### Feature View
-
-A feature view is an object that represents a logical group of time-series feature data as it is found in a [data source](feature-view.md#data-source).
Feature views consist of one or more [entities](feature-view.md#entity), [features](feature-view.md#feature), and a [data source](feature-view.md#data-source). Feature views allow Feast to model your existing feature data in a consistent way in both an offline \(training\) and online \(serving\) environment.
-
-{% tabs %}
-{% tab title="driver\_trips\_feature\_view.py" %}
-```python
-driver_stats_fv = FeatureView(
-    name="driver_activity",
-    entities=["driver"],
-    features=[
-        Feature(name="trips_today", dtype=ValueType.INT64),
-        Feature(name="rating", dtype=ValueType.FLOAT),
-    ],
-    input=BigQuerySource(
-        table_ref="feast-oss.demo_data.driver_activity"
-    )
-)
-```
-{% endtab %}
-{% endtabs %}
-
-Feature views are used during:
-
-* The generation of training datasets by querying the data source of feature views in order to find historical feature values. A single training dataset may consist of features from multiple feature views.
-* Loading of feature values into an online store. Feature views determine the storage schema in the online store.
-* Retrieval of features from the online store. Feature views provide the schema definition to Feast in order to look up features from the online store.
-
-{% hint style="info" %}
-Feast does not generate feature values. It acts as the ingestion and serving system. The data sources described within feature views should reference feature values in their already computed form.
-{% endhint %}
-
-### Data Source
-
-Feast uses a time-series data model to represent data. This data model is used to interpret feature data in data sources in order to build training datasets or when materializing features into an online store.
-
-Below is an example data source with a single entity \(`driver`\) and two features \(`trips_today` and `rating`\).
-
-![Ride-hailing data source](../.gitbook/assets/image%20%2816%29.png)
-
-### Entity
-
-An entity is a collection of semantically related features. Users define entities to map to the domain of their use case. For example, a ride-hailing service could have customers and drivers as their entities, which group related features that correspond to these customers and drivers.
-
-```python
-driver = Entity(name='driver', value_type=ValueType.STRING, join_key='driver_id')
-```
-
-Entities are defined as part of feature views. Entities are used to identify the primary key on which feature values should be stored and retrieved. These keys are used during the lookup of feature values from the online store and the join process in point-in-time joins. It is possible to define composite entities \(more than one entity object\) in a feature view.
-
-Entities should be reused across feature views.
-
-### Feature
-
-A feature is an individual measurable property observed on an entity. For example, a feature of a `customer` entity could be the number of transactions they have made in an average month.
-
-Features are defined as part of feature views. Since Feast does not transform data, a feature is essentially a schema that only contains a name and a type:
-
-```python
-trips_today = Feature(
-    name="trips_today",
-    dtype=ValueType.FLOAT
-)
-```
-
-Together with [data sources](data-model-and-concepts.md#data-source), they indicate to Feast where to find your feature values, e.g., in a specific parquet file or BigQuery table. Feature definitions are also used when reading features from the feature store, using [feature references](data-model-and-concepts.md#feature-references).
-
-Feature names must be unique within a [feature view](data-model-and-concepts.md#feature-view).
-
diff --git a/docs/concepts/feature-views.md b/docs/concepts/feature-views.md
deleted file mode 100644
index dc145264cc..0000000000
--- a/docs/concepts/feature-views.md
+++ /dev/null
@@ -1,123 +0,0 @@
-# Feature Views
-
-### Overview
-
-Feature views are objects used to define and productionize logical groups of features for training and serving.
-
-Feature views serve the following purposes:
-
-* Feature views are a means for defining the location and properties of data sources that contain features.
-* The data sources described within feature views allow Feast to find and materialize feature data into stores.
-* Feature views ensure data is efficiently stored during materialization by providing a grouping mechanism of feature values that occur on the same event timestamp.
-* Features are referenced relative to their feature view during the lookup of features, e.g., `driver_feature_view:driver_rating`.
-
-{% hint style="info" %}
-Feast does not yet apply feature transformations. Feast acts as the productionization layer for pre-existing features. The data sources described within feature views should reference feature values in their already transformed form.
-{% endhint %}
-
-Entities, features, and sources must be defined in order to define a feature view.
-
-### Entity
-
-Define an entity for the driver. Entities can be thought of as primary keys used to retrieve features. Entities are also used to join multiple tables/views during the construction of feature vectors.
-
-```python
-driver = Entity(
-    # Name of the entity. Must be unique within a project
-    name="driver",
-
-    # The join key of an entity describes the storage level field/column on which
-    # features can be looked up. The join key is also used to join feature
-    # tables/views when building feature vectors
-    join_key="driver_id",
-
-    # The storage level type for an entity
-    value_type=ValueType.INT64
-)
-```
-
-### Feature
-
-A feature is an individual measurable property observed on an entity. For example, the number of transactions \(feature\) a customer \(entity\) has completed.
-
-Features are defined as part of feature views. Since Feast does not transform data, a feature is essentially a schema that only contains a name and a type:
-
-```python
-conversion_rate = Feature(
-    # Name of the feature. Used during lookup of features from the feature store.
-    # The name must be unique
-    name="conv_rate",
-
-    # The type used for storage of features (both at source and when materialized
-    # into a store)
-    dtype=ValueType.FLOAT
-)
-```
-
-### Source
-
-Indicates a data source from which feature values can be retrieved. Sources are queried when building training datasets or materializing features into an online store.
-
-```python
-
-driver_stats_source = BigQuerySource(
-    # The BigQuery table where features can be found
-    table_ref="feast-oss.demo_data.driver_stats",
-
-    # The event timestamp is used for point-in-time joins and for ensuring only
-    # features within the TTL are returned
-    event_timestamp_column="datetime",
-
-    # The (optional) created timestamp is used to ensure there are no duplicate
-    # feature rows in the offline store or when building training datasets
-    created_timestamp_column="created",
-)
-```
-
-### Feature View
-
-A feature view ties together the entities, features, and data source defined above:
-
-{% tabs %}
-{% tab title="driver\_trips\_feature\_table.py" %}
-```python
-driver_stats_fv = FeatureView(
-    # The unique name of this feature view.
Two feature views in a single - # project cannot have the same name - name="driver_stats", - - # The list of entities specifies the keys required for joining or looking - # up features from this feature view. The reference provided in this field - # correspond to the name of a defined entity (or entities) - entities=["driver"], - - # The timedelta is the maximum age that each feature value may have - # relative to its lookup time. For historical features (used in training), - # TTL is relative to each timestamp provided in the entity dataframe. - # TTL also allows for eviction of keys from online stores and limits the - # amount of historical scanning required for historical feature values - # during retrieval - ttl=timedelta(weeks=1), - - # The list of features defined below act as a schema to both define features - # for both materialization of features into a store, and are used as references - # during retrieval for building a training dataset or serving features - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT64), - ], - - # Inputs are used to find feature values. In the case of this feature - # view we will query a source table on BigQuery for driver statistics - # features - input=driver_stats_source, - - # Tags are user defined key/value pairs that are attached to each - # feature view - tags={"team": "driver_performance"}, -) -``` -{% endtab %} -{% endtabs %} - diff --git a/docs/concepts/glossary.md b/docs/concepts/glossary.md deleted file mode 100644 index 0c458c7c4f..0000000000 --- a/docs/concepts/glossary.md +++ /dev/null @@ -1,36 +0,0 @@ -# Glossary - -#### **Entity key** - -The combination of entities that uniquely identify a row. For example, a feature table with the composite entity of \(customer, country\) might have an entity key of \(1001, 5\). The key is used during lookups of feature values and for deduplicating historical rows. - -#### Entity timestamp - -The timestamp on which an event occurred. The entity timestamp could describe the event time at which features were calculated, or it could describe the event timestamps at which outcomes were observed. - -Entity timestamps are commonly found on the entity dataframe and associated with the target variable \(outcome\) that needs to be predicted. These timestamps are the target on which point-in-time joins should be made. - -#### Entity rows - -A combination of a single [entity key ](glossary.md#entity-key)and a single [entity timestamp](glossary.md#entity-timestamp). - -#### Entity dataframe - -A collection of [entity rows](glossary.md#entity-rows). This dataframe is enriched with feature values before being used for model training. - -#### Feature References - -Feature references uniquely identify feature values throughout Feast. Feature references can either be defined as objects or as strings. - -The structure of a feature reference in string form is as follows: - -`feature_table:feature` - -Example: - -`drivers_stream:unique_drivers` - -Feature references are unique within a project. It is not possible to reference \(or retrieve\) features from multiple projects at the same time. 
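-
-A feature reference in string form can be passed directly to the SDK when retrieving features. For example \(a minimal sketch; `get_online_features` arguments vary between Feast versions, and the entity row below is a placeholder\):
-
-```python
-# Hypothetical lookup of a feature value by its string reference.
-features = client.get_online_features(
-    feature_refs=["drivers_stream:unique_drivers"],
-    entity_rows=[{"driver_id": "D011234"}],
-)
-```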
-
-
diff --git a/docs/concepts/provider.md b/docs/concepts/provider.md
deleted file mode 100644
index 8941f61b05..0000000000
--- a/docs/concepts/provider.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Provider
-
-A provider is an implementation of a feature store using specific feature store components targeting a specific environment. More specifically, a provider is the target environment in which your feature store is configured to deploy and run.
-
-Providers are built to orchestrate various components \(offline store, online store, infrastructure, compute\) inside an environment. For example, the `gcp` provider supports [BigQuery](https://cloud.google.com/bigquery) as an offline store and [Datastore](https://cloud.google.com/datastore) as an online store, ensuring that these components can work together seamlessly.
-
-Providers also come with default configurations, which make it easier for users to start a feature store in a specific environment.
-
-Please see [feature\_store.yaml](../reference/feature-repository/feature-store-yaml.md#overview) for configuring providers.
-
diff --git a/docs/concepts/sources.md b/docs/concepts/sources.md
deleted file mode 100644
index a76d395d09..0000000000
--- a/docs/concepts/sources.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Sources
-
diff --git a/docs/concepts/stores.md b/docs/concepts/stores.md
deleted file mode 100644
index 3695f6c37d..0000000000
--- a/docs/concepts/stores.md
+++ /dev/null
@@ -1,26 +0,0 @@
-# Stores
-
-In Feast, a store is a database that is populated with feature data that will ultimately be served to models.
-
-### Offline \(Historical\) Store
-
-The offline store maintains historical copies of feature values. These features are grouped and stored in feature tables. During retrieval of historical data, features are queried from these feature tables in order to produce training datasets.
-
-{% hint style="warning" %}
-Feast 0.8 does not support offline storage. Support will be added in Feast 0.9.
-{% endhint %}
-
-### Online Store
-
-The online store maintains only the latest values for a specific feature.
-
-* Feature values are stored based on their [entity keys](glossary.md#entity-key)
-* Feast currently supports Redis as an online store.
-* Online stores are meant for very high throughput writes from ingestion jobs and very low latency access to features during online serving.
-
-{% hint style="info" %}
-Feast only supports a single online store in production
-{% endhint %}
-
-
-
diff --git a/docs/create-a-feature-repository.md b/docs/create-a-feature-repository.md
deleted file mode 100644
index 5f781f0651..0000000000
--- a/docs/create-a-feature-repository.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Create a feature repository
-
diff --git a/docs/deploy-a-feature-store.md b/docs/deploy-a-feature-store.md
deleted file mode 100644
index 0447b0ffbf..0000000000
--- a/docs/deploy-a-feature-store.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Deploy a feature store
-
diff --git a/docs/entities.md b/docs/entities.md
deleted file mode 100644
index dadeac1cac..0000000000
--- a/docs/entities.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Entities
-
diff --git a/docs/feast-on-kubernetes/advanced-1/README.md b/docs/feast-on-kubernetes/advanced-1/README.md
deleted file mode 100644
index 0fb91367c2..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Advanced
-
diff --git a/docs/feast-on-kubernetes/advanced-1/audit-logging.md b/docs/feast-on-kubernetes/advanced-1/audit-logging.md
deleted file mode 100644
index 1870a687bd..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/audit-logging.md
+++ /dev/null
@@ -1,132 +0,0 @@
-# Audit Logging
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-## Introduction
-
-Feast provides audit logging functionality in order to debug problems and to trace the lineage of events.
-
-## Audit Log Types
-
-Audit Logs produced by Feast come in three flavors:
-
-| Audit Log Type | Description |
-| :--- | :--- |
-| Message Audit Log | Logs service calls that can be used to track Feast request handling. Currently only gRPC request/response is supported. Enabling Message Audit Logs can be resource intensive and significantly increase latency, and as such is not recommended on Online Serving. |
-| Transition Audit Log | Logs transitions in status in resources managed by Feast \(ie an Ingestion Job becoming RUNNING\). |
-| Action Audit Log | Logs actions performed on a specific resource managed by Feast \(ie an Ingestion Job is aborted\). |
-
-## Configuration
-
-| Audit Log Type | Description |
-| :--- | :--- |
-| Message Audit Log | Enabled when both `feast.logging.audit.enabled` and `feast.logging.audit.messageLogging.enabled` are set to `true` |
-| Transition Audit Log | Enabled when `feast.logging.audit.enabled` is set to `true` |
-| Action Audit Log | Enabled when `feast.logging.audit.enabled` is set to `true` |
-
-## JSON Format
-
-Audit Logs produced by Feast are written to the console similar to normal logs but in a structured, machine-parsable JSON. Example of a Message Audit Log JSON entry produced:
-
-```text
-{
-  "message": {
-    "logType": "FeastAuditLogEntry",
-    "kind": "MESSAGE",
-    "statusCode": "OK",
-    "request": {
-      "filter": {
-        "project": "dummy"
-      }
-    },
-    "application": "Feast",
-    "response": {},
-    "method": "ListFeatureTables",
-    "identity": "105960238928959148073",
-    "service": "CoreService",
-    "component": "feast-core",
-    "id": "45329ea9-0d48-46c5-b659-4604f6193711",
-    "version": "0.10.0-SNAPSHOT"
-  },
-  "hostname": "feast.core",
-  "timestamp": "2020-10-20T04:45:24Z",
-  "severity": "INFO"
-}
-```
-
-## Log Entry Schema
-
-Fields common to all Audit Log Types:
-
-| Field | Description |
-| :--- | :--- |
-| `logType` | Log Type. Always set to `FeastAuditLogEntry`. Useful for filtering out Feast audit logs. |
-| `application` | Application.
Always set to `Feast`. |
-| `component` | Feast Component producing the Audit Log. Set to `feast-core` for Feast Core and `feast-serving` for Feast Serving. Used to filter out Audit Logs by component. |
-| `version` | Version of Feast producing this Audit Log. Used to filter out Audit Logs by version. |
-
-Fields in Message Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `id` | Generated UUID that uniquely identifies the service call. |
-| `service` | Name of the Service that handled the service call. |
-| `method` | Name of the Method that handled the service call. Useful for filtering Audit Logs by method \(ie `ApplyFeatureTable` calls\) |
-| `request` | Full request submitted by client in the service call as JSON. |
-| `response` | Full response returned to client by the service after handling the service call as JSON. |
-| `identity` | Identity of the client making the service call as a user Id. Only set when Authentication is enabled. |
-| `statusCode` | The status code returned by the service handling the service call \(ie `OK` if service call handled without error\). |
-
-Fields in Action Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `action` | Name of the action taken on the resource. |
-| `resource.type` | Type of resource on which the action was taken \(i.e `FeatureTable`\) |
-| `resource.id` | Identifier specifying the specific resource on which the action was taken. |
-
-Fields in Transition Audit Log Type
-
-| Field | Description |
-| :--- | :--- |
-| `status` | The new status that the resource transitioned to |
-| `resource.type` | Type of resource on which the transition occurred \(i.e `FeatureTable`\) |
-| `resource.id` | Identifier specifying the specific resource on which the transition occurred. |
-
-## Log Forwarder
-
-Feast currently only supports forwarding Request/Response \(Message Audit Log Type\) logs to an external Fluentd service with the `feast.**` Fluentd tag.
-
-### Request/Response Log Example
-
-```text
-{
-  "id": "45329ea9-0d48-46c5-b659-4604f6193711",
-  "service": "CoreService",
-  "status_code": "OK",
-  "identity": "105960238928959148073",
-  "method": "ListProjects",
-  "request": {},
-  "response": {
-    "projects": [
-      "default", "project1", "project2"
-    ]
-  },
-  "release_name": "506.457.14.512"
-}
-```
-
-### Configuration
-
-The Fluentd Log Forwarder is configured with the following configuration options in `application.yml`:
-
-| Settings | Description |
-| :--- | :--- |
-| `feast.logging.audit.messageLogging.destination` | `fluentd` |
-| `feast.logging.audit.messageLogging.fluentdHost` | `localhost` |
-| `feast.logging.audit.messageLogging.fluentdPort` | `24224` |
-
-When using Fluentd as the Log forwarder, a Feast `release_name` can be logged instead of the IP address \(eg. IP of Kubernetes pod deployment\), by setting an environment variable `RELEASE_NAME` when deploying Feast.
-
diff --git a/docs/feast-on-kubernetes/advanced-1/metrics.md b/docs/feast-on-kubernetes/advanced-1/metrics.md
deleted file mode 100644
index 43f7b973b6..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/metrics.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# Metrics
-
-{% hint style="warning" %}
-This page applies to Feast 0.7.
The content may be out of date for Feast 0.8+
-{% endhint %}
-
-## Overview
-
-Feast Components export metrics that can provide insight into Feast behavior:
-
-* [Feast Ingestion Jobs can be configured to push metrics into StatsD](metrics.md#pushing-ingestion-metrics-to-statsd)
-* [Prometheus can be configured to scrape metrics from Feast Core and Serving.](metrics.md#exporting-feast-metrics-to-prometheus)
-
-See the [Metrics Reference](../reference-1/metrics-reference.md) for documentation on the metrics exported by Feast.
-
-{% hint style="info" %}
-Feast Job Controller currently does not export any metrics on its own. However, its `application.yml` is used to configure metrics export for ingestion jobs.
-{% endhint %}
-
-## Pushing Ingestion Metrics to StatsD
-
-### **Feast Ingestion Job**
-
-Feast Ingestion Job can be configured to push Ingestion metrics to a StatsD instance. Metrics export to StatsD for Ingestion Job is configured in the Job Controller's `application.yml` under `feast.jobs.metrics`
-
-```yaml
-feast:
-  jobs:
-    metrics:
-      # Enables StatsD metrics export if true.
-      enabled: true
-      type: statsd
-      # Host and port of the StatsD instance to export to.
-      host: localhost
-      port: 9125
-```
-
-{% hint style="info" %}
-If you need Ingestion Metrics in Prometheus or some other metrics backend, use a metrics forwarder to forward Ingestion Metrics from StatsD to the metrics backend of choice. \(ie Use [`prometheus-statsd-exporter`](https://github.com/prometheus/statsd_exporter) to forward metrics to Prometheus\).
-{% endhint %}
-
-## Exporting Feast Metrics to Prometheus
-
-### **Feast Core and Serving**
-
-Feast Core and Serving export metrics to a Prometheus instance via Prometheus scraping their `/metrics` endpoint. Metrics export to Prometheus for Core and Serving can be configured via their corresponding `application.yml`
-
-```yaml
-server:
-  # Configures the port where metrics are exposed via /metrics for Prometheus to scrape.
-  port: 8081
-```
-
-Configure [Prometheus](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config) to scrape directly from Core and Serving's `/metrics` endpoint.
-
-## Further Reading
-
-See the [Metrics Reference](../reference-1/metrics-reference.md) for documentation on the metrics exported by Feast.
-
diff --git a/docs/feast-on-kubernetes/advanced-1/security.md b/docs/feast-on-kubernetes/advanced-1/security.md
deleted file mode 100644
index b6e42afd73..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/security.md
+++ /dev/null
@@ -1,480 +0,0 @@
----
-description: 'Secure Feast with SSL/TLS, Authentication and Authorization.'
----
-
-# Security
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-### Overview
-
-![Overview of Feast's Security Methods.](../../.gitbook/assets/untitled-25-1-%20%282%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%281%29%20%284%29.jpg)
-
-Feast supports the following security methods:
-
-* [SSL/TLS on messaging between Feast Core, Feast Online Serving and Feast SDKs.](security.md#2-ssl-tls)
-* [Authentication to Feast Core and Serving based on Open ID Connect ID tokens.](security.md#3-authentication)
-* [Authorization based on project membership and delegating authorization grants to external Authorization Server.](security.md#4-authorization)
-
-[Important considerations when integrating Authentication/Authorization](security.md#5-authentication-and-authorization).
-
-### **SSL/TLS**
-
-Feast supports SSL/TLS encrypted inter-service communication among Feast Core, Feast Online Serving, and Feast SDKs.
-
-#### Configuring SSL/TLS on Feast Core and Feast Serving
-
-The following properties configure SSL/TLS. These properties are located in their corresponding `application.yml` files:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `grpc.server.security.enabled` | Enables SSL/TLS functionality if `true` |
-| `grpc.server.security.certificateChain` | Provide the path to the certificate chain. |
-| `grpc.server.security.privateKey` | Provide the path to the private key. |
-
-> Read more on enabling SSL/TLS in the [gRPC starter docs](https://yidongnan.github.io/grpc-spring-boot-starter/en/server/security.html#enable-transport-layer-security).
-
-#### Configuring SSL/TLS on Python SDK/CLI
-
-To enable SSL/TLS in the [Feast Python SDK](https://api.docs.feast.dev/python/#feast.client.Client) or [Feast CLI](../getting-started/connect-to-feast/feast-cli.md), set the config options via `feast config`:
-
-| Configuration Option | Description |
-| :--- | :--- |
-| `core_enable_ssl` | Enables SSL/TLS functionality on connections to Feast Core if `true` |
-| `serving_enable_ssl` | Enables SSL/TLS functionality on connections to Feast Online Serving if `true` |
-| `core_server_ssl_cert` | Optional. Specifies the path of the root certificate used to verify the Core Service's identity. If omitted, uses system certificates. |
-| `serving_server_ssl_cert` | Optional. Specifies the path of the root certificate used to verify the Serving Service's identity. If omitted, uses system certificates. |
-
-{% hint style="info" %}
-The Python SDK automatically uses SSL/TLS when connecting to Feast Core and Feast Online Serving via port 443.
-{% endhint %}
-
-#### Configuring SSL/TLS on Go SDK
-
-Configure SSL/TLS on the [Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) by passing configuration via `SecurityConfig`:
-
-```go
-cli, err := feast.NewSecureGrpcClient("localhost", 6566, feast.SecurityConfig{
-    EnableTLS: true,
-    TLSCertPath: "/path/to/cert.pem",
-})
-```
-
-| Config Option | Description |
-| :--- | :--- |
-| `EnableTLS` | Enables SSL/TLS functionality when connecting to Feast if `true` |
-| `TLSCertPath` | Optional. Provides the path of the root certificate used to verify the Feast Service's identity. If omitted, uses system certificates. |
-
-#### Configuring SSL/TLS on **Java** SDK
-
-Configure SSL/TLS on the [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk) by passing configuration via `SecurityConfig`:
-
-```java
-FeastClient client = FeastClient.createSecure("localhost", 6566,
-  SecurityConfig.newBuilder()
-    .setTLSEnabled(true)
-    .setCertificatePath(Optional.of("/path/to/cert.pem"))
-    .build());
-```
-
-| Config Option | Description |
-| :--- | :--- |
-| `setTLSEnabled()` | Enables SSL/TLS functionality when connecting to Feast if `true` |
-| `setCertificatePath()` | Optional. Set the path of the root certificate used to verify the Feast Service's identity. If omitted, uses system certificates. |
-
-### **Authentication**
-
-{% hint style="warning" %}
-To prevent man-in-the-middle attacks, we recommend that SSL/TLS be implemented prior to authentication.
-{% endhint %}
-
-Authentication can be implemented to identify and validate client requests to Feast Core and Feast Online Serving.
Currently, Feast uses [Open ID Connect \(OIDC\)](https://auth0.com/docs/protocols/openid-connect-protocol) ID tokens \(i.e. [Google Open ID Connect](https://developers.google.com/identity/protocols/oauth2/openid-connect)\) to authenticate client requests.
-
-#### Configuring Authentication in Feast Core and Feast Online Serving
-
-Authentication can be configured for Feast Core and Feast Online Serving via properties in their corresponding `application.yml` files:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.security.authentication.enabled` | Enables Authentication functionality if `true` |
-| `feast.security.authentication.provider` | Authentication Provider type. Currently only supports `jwt` |
-| `feast.security.authentication.option.jwkEndpointURI` | HTTPS URL used by Feast to retrieve the [JWK](https://tools.ietf.org/html/rfc7517) used to verify OIDC ID tokens. |
-
-{% hint style="info" %}
-`jwkEndpointURI` is set to retrieve Google's OIDC JWK by default, allowing OIDC ID tokens issued by Google to be used for authentication.
-{% endhint %}
-
-Behind the scenes, Feast Core and Feast Online Serving authenticate by:
-
-* Extracting the OIDC ID token `TOKEN` from the gRPC metadata submitted with the request:
-
-```text
-('authorization', 'Bearer: TOKEN')
-```
-
-* Validating the token's authenticity using the JWK retrieved from the `jwkEndpointURI`
-
-#### **Authenticating Serving with Feast Core**
-
-Feast Online Serving communicates with Feast Core during normal operation. When both authentication and authorization are enabled on Feast Core, Feast Online Serving is forced to authenticate its requests to Feast Core. Otherwise, Feast Online Serving produces an Authentication failure error when connecting to Feast Core.
-
-Properties used to configure Serving authentication via `application.yml`:
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.core-authentication.enabled` | Requires Feast Online Serving to authenticate when communicating with Feast Core. |
-| `feast.core-authentication.provider` | Selects the provider Feast Online Serving uses to retrieve credentials that are then used to authenticate requests to Feast Core. Valid providers are `google` and `oauth`. |
-
-{% tabs %}
-{% tab title="Google Provider" %}
-Google Provider automatically extracts the credential from the credential JSON file.
-
-* Set the [`GOOGLE_APPLICATION_CREDENTIALS` environment variable](https://cloud.google.com/docs/authentication/getting-started#setting_the_environment_variable) to the path of the credential JSON file.
-{% endtab %}
-
-{% tab title="OAuth Provider" %}
-OAuth Provider makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential. OAuth requires the following options to be set at `feast.security.core-authentication.options.`:
-| Configuration Property | Description |
-| :--- | :--- |
-| `oauth_url` | Target URL receiving the client-credentials request. |
-| `grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `client_id` | Client Id used in the client-credentials request. |
-| `client_secret` | Client secret used in the client-credentials request. |
-| `audience` | Target audience of the credential. Set to the host URL of Feast Core \(i.e. `https://localhost` if Feast Core listens on `localhost`\). |
-| `jwkEndpointURI` | HTTPS URL used to retrieve a JWK that can be used to decode the credential. |
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Python SDK/CLI**
-
-Configure the [Feast Python SDK](https://api.docs.feast.dev/python/) and [Feast CLI](../getting-started/connect-to-feast/feast-cli.md) to use authentication via `feast config`:
-
-```python
-$ feast config set enable_auth true
-```
-
-| Configuration Option | Description |
-| :--- | :--- |
-| `enable_auth` | Enables authentication functionality if set to `true`. |
-| `auth_provider` | Use an authentication provider to obtain a credential for authentication. Currently supports `google` and `oauth`. |
-| `auth_token` | Manually specify a static token for use in authentication. Overrules `auth_provider` if both are set. |
-
-{% tabs %}
-{% tab title="Google Provider" %}
-Google Provider automatically finds and uses Google Credentials to authenticate requests:
-
-* Google Provider automatically uses established credentials for authenticating requests if you are already authenticated with the `gcloud` CLI via:
-
-```text
-$ gcloud auth application-default login
-```
-
-* Alternatively, Google Provider can be configured to use the credentials in the JSON file via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud Authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\):
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-{% endtab %}
-
-{% tab title="OAuth Provider" %}
-OAuth Provider makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests. The OAuth provider requires the following config options to be set via `feast config`:
-| Configuration Property | Description |
-| :--- | :--- |
-| `oauth_token_request_url` | Target URL receiving the client-credentials request. |
-| `oauth_grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `oauth_client_id` | Client Id used in the client-credentials request. |
-| `oauth_client_secret` | Client secret used in the client-credentials request. |
-| `oauth_audience` | Target audience of the credential. Set to the host URL of the target Service \(`https://localhost` if the Service listens on `localhost`\). |
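-
-For example, the options above might be set via the CLI as follows \(a sketch; all values are placeholders for your own OAuth server's details\):
-
-```python
-$ feast config set auth_provider oauth
-$ feast config set oauth_grant_type client_credentials
-$ feast config set oauth_client_id my_client_id
-$ feast config set oauth_client_secret my_client_secret
-$ feast config set oauth_token_request_url https://oauth.endpoint/auth
-$ feast config set oauth_audience https://localhost
-```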
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Go SDK**
-
-Configure the [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) to use authentication by specifying the credential via `SecurityConfig`:
-
-```go
-// error handling omitted.
-// Use Google Credential as provider.
-cred, _ := feast.NewGoogleCredential("localhost:6566")
-cli, _ := feast.NewSecureGrpcClient("localhost", 6566, feast.SecurityConfig{
-    // Specify the credential to provide tokens for Feast Authentication.
-    Credential: cred,
-})
-```
-
-{% tabs %}
-{% tab title="Google Credential" %}
-Google Credential uses the Service Account credentials JSON file set via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud Authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\) to obtain tokens for authenticating Feast requests:
-
-* Exporting `GOOGLE_APPLICATION_CREDENTIALS`
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-
-* Create a Google Credential with target audience.
-
-```go
-cred, _ := feast.NewGoogleCredential("localhost:6566")
-```
-
-> The target audience of the credential should be set to the host URL of the target Service \(ie `https://localhost` if the Service listens on `localhost`\).
-{% endtab %}
-
-{% tab title="OAuth Credential" %}
-OAuth Credential makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests:
-
-* Create OAuth Credential with parameters:
-
-```go
-cred := feast.NewOAuthCredential("localhost:6566", "client_id", "secret", "https://oauth.endpoint/auth")
-```
-| Parameter | Description |
-| :--- | :--- |
-| `audience` | Target audience of the credential. Set to the host URL of the target Service \(`https://localhost` if the Service listens on `localhost`\). |
-| `clientId` | Client Id used in the client-credentials request. |
-| `clientSecret` | Client secret used in the client-credentials request. |
-| `endpointURL` | Target URL to make the client-credentials request to. |
-{% endtab %}
-{% endtabs %}
-
-#### **Enabling Authentication in Java SDK**
-
-Configure the [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) to use authentication by setting credentials via `SecurityConfig`:
-
-```java
-// Use GoogleAuthCredential as provider.
-CallCredentials credentials = new GoogleAuthCredentials(
-    Map.of("audience", "localhost:6566"));
-
-FeastClient client = FeastClient.createSecure("localhost", 6566,
-  SecurityConfig.newBuilder()
-    // Specify the credentials to provide tokens for Feast Authentication.
-    .setCredentials(Optional.of(credentials))
-    .build());
-```
-
-{% tabs %}
-{% tab title="GoogleAuthCredentials" %}
-GoogleAuthCredentials uses the Service Account credentials JSON file set via the `GOOGLE_APPLICATION_CREDENTIALS` environment variable \([Google Cloud authentication documentation](https://cloud.google.com/docs/authentication/getting-started)\) to obtain tokens for authenticating Feast requests:
-
-* Exporting `GOOGLE_APPLICATION_CREDENTIALS`
-
-```bash
-$ export GOOGLE_APPLICATION_CREDENTIALS="path/to/key.json"
-```
-
-* Create a Google Credential with target audience.
-
-```java
-CallCredentials credentials = new GoogleAuthCredentials(
-    Map.of("audience", "localhost:6566"));
-```
-
-> The target audience of the credentials should be set to the host URL of the target Service \(ie `https://localhost` if the Service listens on `localhost`\).
-{% endtab %}
-
-{% tab title="OAuthCredentials" %}
-OAuthCredentials makes an OAuth [client credentials](https://auth0.com/docs/flows/call-your-api-using-the-client-credentials-flow) request to obtain the credential/token used to authenticate Feast requests:
-
-* Create OAuthCredentials with parameters:
-
-```java
-CallCredentials credentials = new OAuthCredentials(Map.of(
-    "audience", "localhost:6566",
-    "grant_type", "client_credentials",
-    "client_id", "some_id",
-    "client_secret", "secret",
-    "oauth_url", "https://oauth.endpoint/auth",
-    "jwkEndpointURI", "https://jwk.endpoint/jwk"));
-```
-| Parameter | Description |
-| :--- | :--- |
-| `audience` | Target audience of the credential. Set to the host URL of the target Service \(`https://localhost` if the Service listens on `localhost`\). |
-| `grant_type` | OAuth grant type. Set as `client_credentials`. |
-| `client_id` | Client Id used in the client-credentials request. |
-| `client_secret` | Client secret used in the client-credentials request. |
-| `oauth_url` | Target URL to make the client-credentials request to obtain the credential. |
-| `jwkEndpointURI` | HTTPS URL used to retrieve a JWK that can be used to decode the credential. |
-{% endtab %}
-{% endtabs %}
-
-### Authorization
-
-{% hint style="info" %}
-Authorization requires that authentication be configured to obtain a user identity for use in authorizing requests.
-{% endhint %}
-
-Authorization provides access control to FeatureTables and/or Features based on project membership. Users who are members of a project are authorized to:
-
-* Create and/or Update a Feature Table in the Project.
-* Retrieve Feature Values for Features in that Project.
-
-#### **Authorization API/Server**
-
-![Feast Authorization Flow](../../.gitbook/assets/rsz_untitled23%20%282%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29.jpg)
-
-Feast delegates Authorization grants to an external Authorization Server that implements the [Authorization Open API specification](https://github.com/feast-dev/feast/blob/master/common/src/main/resources/api.yaml).
-
-* Feast checks whether a user is authorized to make a request by making a `checkAccessRequest` to the Authorization Server.
-* The Authorization Server should return an `AuthorizationResult` indicating whether the user is allowed to make the request.
-
-Authorization can be configured for Feast Core and Feast Online Serving via properties in their corresponding `application.yml`
-
-| Configuration Property | Description |
-| :--- | :--- |
-| `feast.security.authorization.enabled` | Enables authorization functionality if `true`. |
-| `feast.security.authorization.provider` | Authorization Provider type. Currently only supports `http` |
-| `feast.security.authorization.option.authorizationUrl` | URL endpoint of the Authorization Server to make check access requests to. |
-| `feast.security.authorization.option.subjectClaim` | Optional. Name of the claim to extract from the ID Token to include in the check access request as Subject. |
-
-{% hint style="info" %}
-This [example Authorization Server with Keto](https://github.com/feast-dev/feast-keto-auth-server) can be used as a reference implementation for implementing an Authorization Server that Feast supports.
-{% endhint %}
-
-### **Authentication & Authorization**
-
-When using Authentication & Authorization, consider:
-
-* Enabling Authentication without Authorization makes authentication **optional**. You can still send unauthenticated requests.
-* Enabling Authorization forces all requests to be authenticated. Requests that are not authenticated are **dropped.**
-
-
diff --git a/docs/feast-on-kubernetes/advanced-1/troubleshooting.md b/docs/feast-on-kubernetes/advanced-1/troubleshooting.md
deleted file mode 100644
index 7b0224abe3..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/troubleshooting.md
+++ /dev/null
@@ -1,136 +0,0 @@
-# Troubleshooting
-
-{% hint style="warning" %}
-This page applies to Feast 0.7. The content may be out of date for Feast 0.8+
-{% endhint %}
-
-If at any point in time you cannot resolve a problem, please see the [Community](../../community.md) section for reaching out to the Feast community.
-
-### How can I verify that all services are operational?
-
-#### Docker Compose
-
-The containers should be in an `up` state:
-
-```text
-docker ps
-```
-
-#### Google Kubernetes Engine
-
-All services should either be in a `RUNNING` state or `COMPLETED` state:
-
-```text
-kubectl get pods
-```
-
-### How can I verify that I can connect to all services?
-
-First locate the host and port of the Feast Services.
-
-#### **Docker Compose \(from inside the docker network\)**
-
-You will probably need to connect using the hostnames of services and standard Feast ports:
-
-```bash
-export FEAST_CORE_URL=core:6565
-export FEAST_ONLINE_SERVING_URL=online_serving:6566
-export FEAST_HISTORICAL_SERVING_URL=historical_serving:6567
-export FEAST_JOBCONTROLLER_URL=jobcontroller:6570
-```
-
-#### **Docker Compose \(from outside the docker network\)**
-
-You will probably need to connect using `localhost` and standard ports:
-
-```bash
-export FEAST_CORE_URL=localhost:6565
-export FEAST_ONLINE_SERVING_URL=localhost:6566
-export FEAST_HISTORICAL_SERVING_URL=localhost:6567
-export FEAST_JOBCONTROLLER_URL=localhost:6570
-```
-
-#### **Google Kubernetes Engine \(GKE\)**
-
-You will need to find the external IP of one of the nodes as well as the NodePorts. Please make sure that your firewall is open for these ports:
-
-```bash
-export FEAST_IP=$(kubectl describe nodes | grep ExternalIP | awk '{print $2}' | head -n 1)
-export FEAST_CORE_URL=${FEAST_IP}:32090
-export FEAST_ONLINE_SERVING_URL=${FEAST_IP}:32091
-export FEAST_HISTORICAL_SERVING_URL=${FEAST_IP}:32092
-```
-
-`netcat`, `telnet`, or even `curl` can be used to test whether all services are available and ports are open, but `grpc_cli` is the most powerful. It can be installed from [here](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md).
-
-#### Testing Connectivity to Feast Services
-
-Use `grpc_cli` to test connectivity by listing the gRPC methods exposed by Feast services:
-
-```bash
-grpc_cli ls ${FEAST_CORE_URL} feast.core.CoreService
-```
-
-```bash
-grpc_cli ls ${FEAST_JOBCONTROLLER_URL} feast.core.JobControllerService
-```
-
-```bash
-grpc_cli ls ${FEAST_HISTORICAL_SERVING_URL} feast.serving.ServingService
-```
-
-```bash
-grpc_cli ls ${FEAST_ONLINE_SERVING_URL} feast.serving.ServingService
-```
-
-### How can I print logs from the Feast Services?
-
-Feast will typically have four services that you need to monitor if something goes wrong:
-
-* Feast Core
-* Feast Job Controller
-* Feast Serving \(Online\)
-* Feast Serving \(Batch\)
-
-In order to print the logs from these services, please run the commands below.
-
-#### Docker Compose
-
-Use `docker logs` to obtain Feast component logs:
-
-```text
- docker logs -f feast_core_1
-```
-
-```text
- docker logs -f feast_jobcontroller_1
-```
-
-```text
-docker logs -f feast_historical_serving_1
-```
-
-```text
-docker logs -f feast_online_serving_1
-```
-
-#### Google Kubernetes Engine
-
-Use `kubectl logs` to obtain Feast component logs:
-
-```text
-kubectl logs $(kubectl get pods | grep feast-core | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-jobcontroller | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-serving-batch | awk '{print $1}')
-```
-
-```text
-kubectl logs $(kubectl get pods | grep feast-serving-online | awk '{print $1}')
-```
-
diff --git a/docs/feast-on-kubernetes/advanced-1/upgrading.md b/docs/feast-on-kubernetes/advanced-1/upgrading.md
deleted file mode 100644
index 7e61d3518b..0000000000
--- a/docs/feast-on-kubernetes/advanced-1/upgrading.md
+++ /dev/null
@@ -1,113 +0,0 @@
-# Upgrading Feast
-
-### Migration from v0.6 to v0.7
-
-#### Feast Core Validation changes
-
-In v0.7, Feast Core no longer accepts names that start with a number \(0-9\) or contain dashes for:
-
-* Project
-* Feature Set
-* Entities
-* Features
-
-Migrate all project, feature set, entity, and feature names:
-
-* Recreate any names containing '-' with the '-' replaced by '\_'.
-* Recreate any names that have a number \(0-9\) as the first character.
-
-Feast now prevents feature sets from being applied if no store is subscribed to that Feature Set.
-
-* Ensure that a store is configured to subscribe to the Feature Set before applying the Feature Set.
-
-#### Feast Core's Job Coordinator is now Feast Job Controller
-
-In v0.7, Feast Core's Job Coordinator has been decoupled from Feast Core and runs as a separate Feast Job Controller application. See its [Configuration reference](../reference-1/configuration-reference.md#2-feast-core-serving-and-job-controller) for how to configure Feast Job Controller.
-
-**Ingestion Job API**
-
-In v0.7, the following changes are made to the Ingestion Job API:
-
-* Changed the List Ingestion Job API to return a list of `FeatureSetReference` instead of a list of FeatureSet in the response.
-* Moved the `ListIngestionJobs`, `StopIngestionJob`, and `RestartIngestionJob` calls from `CoreService` to `JobControllerService`.
-* Python SDK/CLI: Added a new [Job Controller client](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/contrib/job_controller/client.py) and a `jobcontroller_url` config option.
-
-Users of the Ingestion Job API via gRPC should migrate by:
-
-* Adding a new client to connect to the Job Controller endpoint and calling `ListIngestionJobs`, `StopIngestionJob`, and `RestartIngestionJob` on `JobControllerService` from the new client.
-* Migrating code to accept feature references instead of feature sets returned in the `ListIngestionJobs` response.
-
-Users of Ingestion Job via the Python SDK \(ie `feast ingest-jobs list` or `client.stop_ingest_job()` etc.\) should migrate by:
-
-* `ingest_job()` methods only: Create a new separate [Job Controller client](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/contrib/job_controller/client.py) to connect to the job controller and call the `ingest_job()` methods using the new client \(see the sketch below\).
-* Configure the Feast Job Controller endpoint URL via the `jobcontroller_url` config option.
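-
-A minimal sketch of the migrated Python flow \(this assumes the contrib client exposes snake\_case methods mirroring the gRPC calls above and accepts the Job Controller URL as a keyword option; verify against the linked client module before use\):
-
-```python
-# Hypothetical usage of the separate Job Controller client.
-from feast.contrib.job_controller.client import Client as JobControllerClient
-
-jc_client = JobControllerClient(jobcontroller_url="localhost:6570")
-
-# Ingestion job calls now go through the Job Controller instead of Feast Core.
-jobs = jc_client.list_ingest_jobs()
-jc_client.stop_ingest_job(jobs[0])
-```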
-
-#### Configuration Properties Changes
-
-* Rename the `feast.jobs.consolidate-jobs-per-source` property to `feast.jobs.controller.consolidate-jobs-per-sources`
-* Rename `feast.security.authorization.options.subjectClaim` to `feast.security.authentication.options.subjectClaim`
-* Rename `feast.logging.audit.messageLoggingEnabled` to `feast.audit.messageLogging.enabled`
-
-### Migration from v0.5 to v0.6
-
-#### Database schema
-
-In Release 0.6 we introduced [Flyway](https://flywaydb.org/) to handle schema migrations in PostgreSQL. Flyway is integrated into `core`, and from now on all migrations will be run automatically on `core` start. It uses the table `flyway_schema_history` in the same database \(also created automatically\) to keep track of already applied migrations, so no specific maintenance should be needed.
-
-If you already have an existing deployment of Feast 0.5, Flyway will detect the existing tables and skip the first baseline migration.
-
-After `core` has started, `flyway_schema_history` should look like this:
-
-```text
->> select version, description, script, checksum from flyway_schema_history
-
-version | description | script | checksum
---------+-----------------------------------------+-----------------------------------------+------------
- 1 | << Flyway Baseline >> | << Flyway Baseline >> |
- 2 | RELEASE 0.6 Generalizing Source AND ... | V2__RELEASE_0.6_Generalizing_Source_... | 1537500232
-```
-
-The following major schema changes were made in this release:
-
-* Source is no longer shared between FeatureSets. It has been changed to a 1:1 relation,
-
-  and the source's primary key is now an auto-incremented number.
-
-* Due to the generalization of Source, the `sources.topics` & `sources.bootstrap_servers` columns were deprecated.
-
-  They will be replaced with `sources.config`. Data migration is handled by code when the respective Source is used.
-
-  `topics` and `bootstrap_servers` will be deleted in the next release.
-
-* Job \(table `jobs`\) is no longer connected to `Source` \(table `sources`\), since it uses a consolidated source for optimization purposes.
-
-  All data required by a Job is now embedded in its table.
-
-New Models \(tables\):
-
-* feature\_statistics
-
-Minor changes:
-
-* FeatureSet has a new column, version \(see [proto](https://github.com/feast-dev/feast/blob/master/protos/feast/core/FeatureSet.proto) for details\)
-* The connecting table `jobs_feature_sets` \(many-to-many relation between jobs & feature sets\)
-
-  now has `version` and `delivery_status` columns.
-
-### Migration from v0.4 to v0.6
-
-#### Database
-
-For all versions earlier than 0.5, seamless migration is not feasible due to earlier breaking changes, and a new database will need to be created.
-
-Since the database will be empty, the first \(baseline\) migration will be applied:
-
-```text
->> select version, description, script, checksum from flyway_schema_history
-
-version | description | script | checksum
---------+-----------------------------------------+-----------------------------------------+------------
- 1 | Baseline | V1__Baseline.sql | 1091472110
- 2 | RELEASE 0.6 Generalizing Source AND ... | V2__RELEASE_0.6_Generalizing_Source_...
-```
-
diff --git a/docs/feast-on-kubernetes/concepts/README.md b/docs/feast-on-kubernetes/concepts/README.md
deleted file mode 100644
index e834417d3f..0000000000
--- a/docs/feast-on-kubernetes/concepts/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Concepts
-
diff --git a/docs/feast-on-kubernetes/concepts/architecture.md b/docs/feast-on-kubernetes/concepts/architecture.md
deleted file mode 100644
index f4cf23eb95..0000000000
--- a/docs/feast-on-kubernetes/concepts/architecture.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Architecture
-
-![](../../.gitbook/assets/image%20%286%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%283%29%20%282%29%20%281%29%20%282%29.png)
-
-## Sequence description
-
-1. **Log Raw Events:** Production backend applications are configured to emit internal state changes as events to a stream.
-2. **Create Stream Features:** Stream processing systems like Flink, Spark, and Beam are used to transform and refine events and to produce features that are logged back to the stream.
-3. **Log Streaming Features:** Both raw and refined events are logged into a data lake or batch storage location.
-4. **Create Batch Features:** ELT/ETL systems like Spark and SQL are used to transform data in the batch store.
-5. **Define and Ingest Features:** The Feast user defines [feature tables](feature-tables.md) based on the features available in batch and streaming sources and publishes these definitions to Feast Core.
-6. **Poll Feature Definitions:** The Feast Job Service polls for new or changed feature definitions.
-7. **Start Ingestion Jobs:** Every new feature table definition results in a new ingestion job being provisioned \(see limitations\).
-8. **Batch Ingestion:** Batch ingestion jobs are short-lived jobs that load data from batch sources into either an offline or online store \(see limitations\).
-9. **Stream Ingestion:** Streaming ingestion jobs are long-lived jobs that load data from stream sources into online stores. A stream source and batch source on a feature table must have the same features/fields.
-10. **Model Training:** A model training pipeline is launched. It uses the Feast Python SDK to retrieve a training dataset and trains a model.
-11. **Get Historical Features:** Feast exports a point-in-time correct training dataset based on the list of features and entity DataFrame provided by the model training pipeline \(see the sketch at the end of this page\).
-12. **Deploy Model:** The trained model binary \(and list of features\) are deployed into a model serving system.
-13. **Get Prediction:** A backend system makes a request for a prediction from the model serving service.
-14. **Retrieve Online Features:** The model serving service makes a request to the Feast Online Serving service for online features using a Feast SDK.
-15. **Return Prediction:** The model serving service makes a prediction using the returned features and returns the outcome.
-
-{% hint style="warning" %}
-Limitations
-
-* Only Redis is supported for online storage.
-* Batch ingestion jobs must be triggered from your own scheduler like Airflow. Streaming ingestion jobs are automatically launched by the Feast Job Service.
-{% endhint %}
-
-## Components
-
-A complete Feast deployment contains the following components:
-
-* **Feast Core:** Acts as the central registry for feature and entity definitions in Feast.
-* **Feast Job Service:** Manages data processing jobs that load data from sources into stores, and jobs that export training datasets.
-* **Feast Serving:** Provides low-latency access to feature values in an online store.
-* **Feast Python SDK/CLI:** The primary user facing SDK. Used to:
-  * Manage feature definitions with Feast Core.
-  * Launch jobs through the Feast Job Service.
-  * Retrieve training datasets.
-  * Retrieve online features.
-* **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store can be populated either by batch ingestion jobs \(in the case the user has no streaming source\) or by a streaming ingestion job from a streaming source. Feast Online Serving looks up feature values from the online store.
-* **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets.
-* **Feast Spark SDK:** A Spark-specific Feast SDK. Allows teams to use Spark for loading features into an online store and for building training datasets over offline sources.
-
-Please see the [configuration reference](../reference-1/configuration-reference.md#overview) for more details on configuring these components.
-
-{% hint style="info" %}
-Java and Go Clients are also available for online feature retrieval. See [API Reference](../reference-1/api/).
-{% endhint %}
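-As a usage sketch of steps 10 and 11, training pipelines retrieve data through the Python SDK. The sketch below is illustrative only: the feature references and entity DataFrame are invented, and it assumes the Feast 0.9-style client API in which `get_historical_features` returns a job handle \(the same `client.get_historical_features(...)` call referenced in the troubleshooting section later in this document\):
-
-```python
-from datetime import datetime
-
-import pandas as pd
-from feast import Client
-
-client = Client(core_url="feast.example.com:6565")
-
-# Entity DataFrame: entity keys plus the event timestamps to join on.
-entity_df = pd.DataFrame({
-    "driver_id": [1001, 1002],
-    "event_timestamp": [datetime(2021, 1, 1), datetime(2021, 1, 2)],
-})
-
-# Point-in-time correct export of the requested features (step 11).
-job = client.get_historical_features(
-    feature_refs=["driver_trips:average_daily_rides", "driver_trips:rating"],
-    entity_source=entity_df,
-)
-print(job.get_output_file_uri())  # location of the exported training dataset
-```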
-
diff --git a/docs/feast-on-kubernetes/concepts/entities.md b/docs/feast-on-kubernetes/concepts/entities.md
deleted file mode 100644
index e8134cf142..0000000000
--- a/docs/feast-on-kubernetes/concepts/entities.md
+++ /dev/null
@@ -1,64 +0,0 @@
-# Entities
-
-## Overview
-
-An entity is any domain object that can be modeled and about which information can be stored. Entities are usually recognizable concepts, either concrete or abstract, such as persons, places, things, or events.
-
-Examples of entities in the context of ride-hailing and food delivery: `customer`, `order`, `driver`, `restaurant`, `dish`, `area`.
-
-Entities are important in the context of feature stores since features are always properties of a specific entity. For example, we could have a feature `total_trips_24h` for driver `D011234` with a feature value of `11`.
-
-Feast uses entities in the following ways:
-
-* Entities serve as the keys used to look up features for producing training datasets and online feature values.
-* Entities serve as a natural grouping of features in a feature table. A feature table must belong to an entity \(which could be a composite entity\).
-
-## Structure of an Entity
-
-When creating an entity specification, consider the following fields:
-
-* **Name**: Name of the entity
-* **Description**: Description of the entity
-* **Value Type**: Value type of the entity. Feast will attempt to coerce entity columns in your data sources into this type.
-* **Labels**: Labels are maps that allow users to attach their own metadata to entities
-
-A valid entity specification is shown below:
-
-```python
-customer = Entity(
-    name="customer_id",
-    description="Customer id for ride customer",
-    value_type=ValueType.INT64,
-    labels={}
-)
-```
-
-## Working with an Entity
-
-### Creating an Entity
-
-```python
-# Create a customer entity
-customer_entity = Entity(name="customer_id", description="ID of car customer")
-client.apply(customer_entity)
-```
-
-### Updating an Entity
-
-```python
-# Update a customer entity
-customer_entity = client.get_entity("customer_id")
-customer_entity.description = "ID of bike customer"
-client.apply(customer_entity)
-```
-
-Permitted changes include:
-
-* The entity's description and labels
-
-The following changes are not permitted:
-
-* Project
-* Name of an entity
-* Type
-
diff --git a/docs/feast-on-kubernetes/concepts/feature-tables.md b/docs/feast-on-kubernetes/concepts/feature-tables.md
deleted file mode 100644
index 5b5c0efc56..0000000000
--- a/docs/feast-on-kubernetes/concepts/feature-tables.md
+++ /dev/null
@@ -1,122 +0,0 @@
-# Feature Tables
-
-## Overview
-
-Feature tables are both a schema and a logical means of grouping features, data [sources](sources.md), and other related metadata.
-
-Feature tables serve the following purposes:
-
-* Feature tables are a means for defining the location and properties of data [sources](sources.md).
-* Feature tables are used to create within Feast a database-level structure for the storage of feature values.
-* The data sources described within feature tables allow Feast to find and ingest feature data into stores within Feast.
-* Feature tables ensure data is efficiently stored during [ingestion](../user-guide/define-and-ingest-features.md) by providing a grouping mechanism of feature values that occur on the same event timestamp.
-
-{% hint style="info" %}
-Feast does not yet apply feature transformations. Transformations are currently expected to happen before data is ingested into Feast. The data sources described within feature tables should reference feature values in their already transformed form.
-{% endhint %}
-
-### Features
-
-A feature is an individual measurable property observed on an entity. For example, the number of transactions \(feature\) a customer \(entity\) has completed. Features are used for both model training and scoring \(batch, online\).
-
-Features are defined as part of feature tables. Since Feast does not apply transformations, a feature is basically a schema that only contains a name and a type:
-
-```python
-avg_daily_ride = Feature("average_daily_rides", ValueType.FLOAT)
-```
-
-Visit [FeatureSpec](https://api.docs.feast.dev/grpc/feast.core.pb.html#FeatureSpecV2) for the complete feature specification API.
-
-## Structure of a Feature Table
-
-Feature tables contain the following fields:
-
-* **Name:** Name of feature table. This name must be unique within a project.
-* **Entities:** List of [entities](entities.md) to associate with the features defined in this feature table. Entities are used as lookup keys when retrieving features from a feature table.
-* **Features:** List of features within a feature table.
-* **Labels:** Labels are arbitrary key-value properties that can be defined by users.
-* **Max age:** Max age affects the retrieval of features from a feature table. Age is measured as the duration of time between the event timestamp of a feature and the lookup time on an [entity key]() used to retrieve the feature. Feature values outside max age will be returned as unset values. Max age allows for eviction of keys from online stores and limits the amount of historical scanning required for historical feature values during retrieval.
-* **Batch Source:** The batch data source from which Feast will ingest feature values into stores. This can either be used to back-fill stores before switching over to a streaming source, or it can be used as the primary source of data for a feature table. Visit [Sources](sources.md) to learn more about batch sources.
-* **Stream Source:** The streaming data source from which you can ingest streaming feature values into Feast. Streaming sources must be paired with a batch source containing the same feature values. A streaming source is only used to populate online stores. The batch equivalent source that is paired with a streaming source is used during the generation of historical feature datasets. Visit [Sources](sources.md) to learn more about stream sources.
-
-Here is a ride-hailing example of a valid feature table specification:
-
-{% tabs %}
-{% tab title="driver\_trips\_feature\_table.py" %}
-```python
-from feast import BigQuerySource, FeatureTable, Feature, ValueType
-from google.protobuf.duration_pb2 import Duration
-
-driver_ft = FeatureTable(
-    name="driver_trips",
-    entities=["driver_id"],
-    features=[
-        Feature("average_daily_rides", ValueType.FLOAT),
-        Feature("rating", ValueType.FLOAT)
-    ],
-    max_age=Duration(seconds=3600),
-    labels={
-        "team": "driver_matching"
-    },
-    batch_source=BigQuerySource(
-        table_ref="gcp_project:bq_dataset.bq_table",
-        event_timestamp_column="datetime",
-        created_timestamp_column="timestamp",
-        field_mapping={
-            "rating": "driver_rating"
-        }
-    )
-)
-```
-{% endtab %}
-{% endtabs %}
-
-By default, Feast assumes that the features specified in a feature table correspond one-to-one to the fields found in the sources. All features defined in a feature table should be available in the defined sources.
-
-Field mappings can be used to map features defined in Feast to fields as they occur in data sources.
-
-In the example feature table specification above, we use a field mapping to ensure the feature named `rating` in the batch source is mapped to the field named `driver_rating`.
-
-## Working with a Feature Table
-
-#### Creating a Feature Table
-
-```python
-driver_ft = FeatureTable(...)
-client.apply(driver_ft)
-```
-
-#### Updating a Feature Table
-
-```python
-driver_ft = FeatureTable(...)
-
-client.apply(driver_ft)
-
-driver_ft.labels = {"team": "marketplace"}
-
-client.apply(driver_ft)
-```
-
-#### Feast currently supports the following changes to feature tables:
-
-* Adding new features.
-* Removing features.
-* Updating source, max age, and labels.
-
-{% hint style="warning" %}
-Deleted features are archived, rather than removed completely. Importantly, new features cannot use the names of these deleted features.
-{% endhint %}
-
-#### Feast currently does not support the following changes to feature tables:
-
-* Changes to the project or name of a feature table.
-* Changes to entities related to a feature table.
-* Changes to names and types of existing features.
-
-#### Deleting a Feature Table
-
-{% hint style="danger" %}
-Feast currently does not support the deletion of feature tables.
-{% endhint %}
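-As a closing usage sketch for this page \(illustrative values; it assumes the Feast 0.9-style `client.ingest()` helper, which writes a DataFrame into a feature table's batch source\):
-
-```python
-from datetime import datetime
-
-import pandas as pd
-from feast import Client
-
-client = Client(core_url="feast.example.com:6565")
-driver_ft = client.get_feature_table("driver_trips")
-
-# Columns follow the feature table defined above: entity key, features,
-# and the event/created timestamp columns of its batch source.
-df = pd.DataFrame({
-    "driver_id": [1001],
-    "average_daily_rides": [10.5],
-    "rating": [4.8],
-    "datetime": [datetime.utcnow()],
-    "timestamp": [datetime.utcnow()],
-})
-
-client.ingest(driver_ft, df)
-```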
-
diff --git a/docs/feast-on-kubernetes/concepts/overview.md b/docs/feast-on-kubernetes/concepts/overview.md
deleted file mode 100644
index 461510984b..0000000000
--- a/docs/feast-on-kubernetes/concepts/overview.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# Overview
-
-### Concepts
-
-[Entities](entities.md) are objects in an organization such as customers, transactions, drivers, products, etc.
-
-[Sources](sources.md) are external sources of data where feature data can be found.
-
-[Feature Tables](feature-tables.md) are objects that define logical groupings of features, data sources, and other related metadata.
-
-### Concept Hierarchy
-
-![](../../.gitbook/assets/image%20%284%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%281%29.png)
-
-Feast contains the following core concepts:
-
-* **Projects:** Serve as a top-level namespace for all Feast resources. Each project is a completely independent environment in Feast. Users can only work in a single project at a time.
-* **Entities:** Entities are the objects in an organization on which features occur. They map to your business domain \(users, products, transactions, locations\).
-* **Feature Tables:** Define a group of features that occur on a specific entity.
-* **Features:** Individual features within a feature table.
-
diff --git a/docs/feast-on-kubernetes/concepts/sources.md b/docs/feast-on-kubernetes/concepts/sources.md
deleted file mode 100644
index 65595d94a9..0000000000
--- a/docs/feast-on-kubernetes/concepts/sources.md
+++ /dev/null
@@ -1,90 +0,0 @@
-# Sources
-
-### Overview
-
-Sources are descriptions of external feature data and are registered to Feast as part of [feature tables](feature-tables.md). Once registered, Feast can ingest feature data from these sources into stores.
-
-Currently, Feast supports the following source types:
-
-#### Batch Source
-
-* File \(as in Spark\): Parquet \(only\).
-* BigQuery
-
-#### Stream Source
-
-* Kafka
-* Kinesis
-
-The following encodings are supported on streams:
-
-* Avro
-* Protobuf
-
-### Structure of a Source
-
-For both batch and stream sources, the following configurations are necessary:
-
-* **Event timestamp column**: Name of the column containing the timestamp when the event data occurred. Used during point-in-time joins of feature values to [entity timestamps]().
-* **Created timestamp column**: Name of the column containing the timestamp when the data is created. Used to deduplicate data when multiple copies of the same [entity key]() are ingested.
-
-Example data source specifications:
-
-{% tabs %}
-{% tab title="batch\_sources.py" %}
-```python
-from feast import FileSource
-from feast.data_format import ParquetFormat
-
-batch_file_source = FileSource(
-    file_format=ParquetFormat(),
-    file_url="file:///feast/customer.parquet",
-    event_timestamp_column="event_timestamp",
-    created_timestamp_column="created_timestamp",
-)
-```
-{% endtab %}
-
-{% tab title="stream\_sources.py" %}
-```python
-from feast import KafkaSource
-from feast.data_format import ProtoFormat
-
-stream_kafka_source = KafkaSource(
-    bootstrap_servers="localhost:9094",
-    message_format=ProtoFormat(class_path="class.path"),
-    topic="driver_trips",
-    event_timestamp_column="event_timestamp",
-    created_timestamp_column="created_timestamp",
-)
-```
-{% endtab %}
-{% endtabs %}
-
-The [Feast Python API documentation](https://api.docs.feast.dev/python/) provides more information about options to specify for the above sources.
-
-### Working with a Source
-
-#### Creating a Source
-
-Sources are defined as part of [feature tables](feature-tables.md):
-
-```python
-batch_bigquery_source = BigQuerySource(
-    table_ref="gcp_project:bq_dataset.bq_table",
-    event_timestamp_column="event_timestamp",
-    created_timestamp_column="created_timestamp",
-)
-
-stream_kinesis_source = KinesisSource(
-    bootstrap_servers="localhost:9094",
-    record_format=ProtoFormat(class_path="class.path"),
-    region="us-east-1",
-    stream_name="driver_trips",
-    event_timestamp_column="event_timestamp",
-    created_timestamp_column="created_timestamp",
-)
-```
-
-Feast ensures that the source complies with the schema of the feature table. These specified data sources can then be included inside a feature table specification and registered to Feast Core.
-
diff --git a/docs/feast-on-kubernetes/concepts/stores.md b/docs/feast-on-kubernetes/concepts/stores.md
deleted file mode 100644
index 59deac0a6a..0000000000
--- a/docs/feast-on-kubernetes/concepts/stores.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Stores
-
-In Feast, a store is a database that is populated with feature data that will ultimately be served to models.
-
-## Offline \(Historical\) Store
-
-The offline store maintains historical copies of feature values. These features are grouped and stored in feature tables. During retrieval of historical data, features are queried from these feature tables in order to produce training datasets.
-
-## Online Store
-
-The online store maintains only the latest values for a specific feature.
-
-* Feature values are stored based on their [entity keys]()
-* Feast currently supports Redis as an online store.
-* Online stores are meant for very high throughput writes from ingestion jobs and very low latency access to features during online serving.
-
-{% hint style="info" %}
-Feast only supports a single online store in production
-{% endhint %}
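-To make the offline/online split concrete, the sketch below shows the online retrieval path through the Python SDK \(illustrative endpoints and feature references, assuming a Feast 0.9-style client\); the offline path is the `get_historical_features` flow sketched on the architecture page:
-
-```python
-from feast import Client
-
-client = Client(
-    core_url="feast.example.com:6565",     # Feast Core (registry)
-    serving_url="feast.example.com:6566",  # Feast Online Serving
-)
-
-# Online store lookup: latest feature values only, keyed by entity.
-response = client.get_online_features(
-    feature_refs=["driver_trips:average_daily_rides"],
-    entity_rows=[{"driver_id": 1001}],
-)
-print(response.to_dict())
-```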
-
diff --git a/docs/feast-on-kubernetes/getting-started/README.md b/docs/feast-on-kubernetes/getting-started/README.md
deleted file mode 100644
index f0c2b76cc5..0000000000
--- a/docs/feast-on-kubernetes/getting-started/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# Getting started
-
-{% hint style="danger" %}
-Feast on Kubernetes is only supported using Feast 0.9 \(and below\). We are working to add support for Feast on Kubernetes with the latest release of Feast \(0.10+\). Please see our [roadmap](../../roadmap.md) for more details.
-{% endhint %}
-
-### Install Feast
-
-If you would like to deploy a new installation of Feast, click on [Install Feast](install-feast/)
-
-{% page-ref page="install-feast/" %}
-
-### Connect to Feast
-
-If you would like to connect to an existing Feast deployment, click on [Connect to Feast](connect-to-feast/)
-
-{% page-ref page="connect-to-feast/" %}
-
-### Learn Feast
-
-If you would like to learn more about Feast, click on [Learn Feast](learn-feast.md)
-
-{% page-ref page="learn-feast.md" %}
-
diff --git a/docs/feast-on-kubernetes/getting-started/connect-to-feast/README.md b/docs/feast-on-kubernetes/getting-started/connect-to-feast/README.md
deleted file mode 100644
index 4333359f90..0000000000
--- a/docs/feast-on-kubernetes/getting-started/connect-to-feast/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Connect to Feast
-
-### Feast Python SDK
-
-The Feast Python SDK is used as a library to interact with a Feast deployment.
-
-* Define, register, and manage entities and features
-* Ingest data into Feast
-* Build and retrieve training datasets
-* Retrieve online features
-
-{% page-ref page="python-sdk.md" %}
-
-### Feast CLI
-
-The Feast CLI is a command line implementation of the Feast Python SDK.
-
-* Define, register, and manage entities and features from the terminal
-* Ingest data into Feast
-* Manage ingestion jobs
-
-{% page-ref page="feast-cli.md" %}
-
-### Online Serving Clients
-
-The following clients can be used to retrieve online feature values:
-
-* [Feast Python SDK](https://api.docs.feast.dev/python/)
-* [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go)
-* [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk)
-
diff --git a/docs/feast-on-kubernetes/getting-started/connect-to-feast/feast-cli.md b/docs/feast-on-kubernetes/getting-started/connect-to-feast/feast-cli.md
deleted file mode 100644
index 47471b8471..0000000000
--- a/docs/feast-on-kubernetes/getting-started/connect-to-feast/feast-cli.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# Feast CLI
-
-Install the Feast CLI using pip:
-
-```bash
-pip install feast==0.9.*
-```
-
-Configure the CLI to connect to your Feast Core deployment:
-
-```text
-feast config set core_url your.feast.deployment
-```
-
-{% hint style="info" %}
-By default, all configuration is stored in `~/.feast/config`
-{% endhint %}
-
-The CLI is a wrapper around the [Feast Python SDK](python-sdk.md):
-
-```text
-$ feast
-
-Usage: feast [OPTIONS] COMMAND [ARGS]...
-
-Options:
-  --help  Show this message and exit.
-
-Commands:
-  config          View and edit Feast properties
-  entities        Create and manage entities
-  feature-tables  Create and manage feature tables
-  jobs            Create and manage jobs
-  projects        Create and manage projects
-  version         Displays version and connectivity information
-```
-
diff --git a/docs/feast-on-kubernetes/getting-started/connect-to-feast/python-sdk.md b/docs/feast-on-kubernetes/getting-started/connect-to-feast/python-sdk.md
deleted file mode 100644
index 3e7c86880e..0000000000
--- a/docs/feast-on-kubernetes/getting-started/connect-to-feast/python-sdk.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Python SDK
-
-Install the [Feast Python SDK](https://api.docs.feast.dev/python/) using pip:
-
-```bash
-pip install feast==0.9.*
-```
-
-Connect to an existing Feast Core deployment:
-
-```python
-from feast import Client
-
-# Connect to an existing Feast Core deployment
-client = Client(core_url='feast.example.com:6565')
-
-# Ensure that your client is connected by printing out some feature tables
-client.list_feature_tables()
-```
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/README.md b/docs/feast-on-kubernetes/getting-started/install-feast/README.md
deleted file mode 100644
index 0b77ab431a..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/README.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Install Feast
-
-A production deployment of Feast runs on Kubernetes.
-
-## Kubernetes \(with Helm\)
-
-This guide installs Feast into an existing Kubernetes cluster using Helm. The installation is not specific to any cloud platform or environment, but requires Kubernetes and Helm.
-
-{% page-ref page="kubernetes-with-helm.md" %}
-
-## Amazon EKS \(with Terraform\)
-
-This guide installs Feast into an AWS environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly.
-
-{% page-ref page="kubernetes-amazon-eks-with-terraform.md" %}
-
-## Azure AKS \(with Helm\)
-
-This guide installs Feast into an Azure AKS environment with Helm.
-
-{% page-ref page="kubernetes-azure-aks-with-helm.md" %}
-
-## Azure AKS \(with Terraform\)
-
-This guide installs Feast into an Azure environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly.
-
-{% page-ref page="kubernetes-azure-aks-with-terraform.md" %}
-
-## Google Cloud GKE \(with Terraform\)
-
-This guide installs Feast into a Google Cloud environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly.
-
-{% page-ref page="google-cloud-gke-with-terraform.md" %}
-
-## IBM Cloud Kubernetes Service \(IKS\) and Red Hat OpenShift \(using Kustomize\)
-
-This guide installs Feast into an existing [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/kubernetes-service) or [Red Hat OpenShift on IBM Cloud](https://www.ibm.com/cloud/openshift) using Kustomize.
-
-{% page-ref page="ibm-cloud-iks-with-kustomize.md" %}
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/google-cloud-gke-with-terraform.md b/docs/feast-on-kubernetes/getting-started/install-feast/google-cloud-gke-with-terraform.md
deleted file mode 100644
index a3252cf0bb..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/google-cloud-gke-with-terraform.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Google Cloud GKE \(with Terraform\)
-
-### Overview
-
-This guide installs Feast on GKE using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/gcp).
-
-{% hint style="info" %}
-The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your GCP account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production.
-{% endhint %}
-
-This Terraform configuration creates the following resources:
-
-* GKE cluster
-* Feast services running on GKE
-* Google Memorystore \(Redis\) as online store
-* Dataproc cluster
-* Kafka running on GKE, exposed to the Dataproc cluster via internal load balancer
-
-### 1. Requirements
-
-* Install [Terraform](https://www.terraform.io/) >= 0.12 \(tested with 0.13.3\)
-* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.3.4\)
-* GCP [authentication](https://cloud.google.com/docs/authentication) and sufficient [privilege](https://cloud.google.com/iam/docs/understanding-roles) to create the resources listed above.
-
-### 2. Configure Terraform
-
-Create a `.tfvars` file under `feast/infra/terraform/gcp`; in our example, we name it `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. Sample configurations are provided below:
-
-{% code title="my\_feast.tfvars" %}
-```typescript
-gcp_project_name        = "kf-feast"
-name_prefix             = "feast-0-8"
-region                  = "asia-east1"
-gke_machine_type        = "n1-standard-2"
-network                 = "default"
-subnetwork              = "default"
-dataproc_staging_bucket = "feast-dataproc"
-```
-{% endcode %}
-
-### 3. Apply
-
-After completing the configuration, initialize Terraform and apply:
-
-```bash
-$ cd feast/infra/terraform/gcp
-$ terraform init
-$ terraform apply -var-file=my_feast.tfvars
-```
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md b/docs/feast-on-kubernetes/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md
deleted file mode 100644
index 0abca57b6d..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md
+++ /dev/null
@@ -1,193 +0,0 @@
-# IBM Cloud Kubernetes Service \(IKS\) and Red Hat OpenShift \(with Kustomize\)
-
-## Overview
-
-This guide installs Feast on an existing IBM Cloud Kubernetes cluster or Red Hat OpenShift on IBM Cloud, and ensures the following services are running:
-
-* Feast Core
-* Feast Online Serving
-* Postgres
-* Redis
-* Kafka \(Optional\)
-* Feast Jupyter \(Optional\)
-* Prometheus \(Optional\)
-
-## 1. Prerequisites
-
-1. [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/kubernetes-service) or [Red Hat OpenShift on IBM Cloud](https://www.ibm.com/cloud/openshift)
-2. Install [Kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#kubectl) that matches the major.minor versions of your IKS, or install the [OpenShift CLI](https://cloud.ibm.com/docs/openshift?topic=openshift-openshift-cli#cli_oc) that matches your local operating system and OpenShift cluster version.
-3. Install [Helm 3](https://helm.sh/)
-4. Install [Kustomize](https://kubectl.docs.kubernetes.io/installation/kustomize/)
-
-## 2. Preparation
-
-### IBM Cloud Block Storage Setup \(IKS only\)
-
-:warning: If you have a Red Hat OpenShift Cluster on IBM Cloud, skip to this [section](ibm-cloud-iks-with-kustomize.md#Security-Context-Constraint-Setup).
-
-By default, an IBM Cloud Kubernetes cluster uses [IBM Cloud File Storage](https://www.ibm.com/cloud/file-storage) based on NFS as the default storage class, and non-root users do not have write permission on the volume mount path for NFS-backed storage. Some common container images in Feast, such as Redis, Postgres, and Kafka, specify a non-root user to access the mount path in the images. When containers are deployed using these images, the containers fail to start due to insufficient permissions of the non-root user creating folders on the mount path.
-
-[IBM Cloud Block Storage](https://www.ibm.com/cloud/block-storage) allows for the creation of raw storage volumes and provides faster performance without the permission restrictions of NFS-backed storage.
-
-Therefore, to deploy Feast we need to set up [IBM Cloud Block Storage](https://cloud.ibm.com/docs/containers?topic=containers-block_storage#install_block) as the default storage class so that you can have all the functionality working and get the best experience from Feast.
-
-1. [Follow the instructions](https://helm.sh/docs/intro/install/) to install the Helm version 3 client on your local machine.
-2. Add the IBM Cloud Helm chart repository to the cluster where you want to use the IBM Cloud Block Storage plug-in.
-
-   ```text
-   helm repo add iks-charts https://icr.io/helm/iks-charts
-   helm repo update
-   ```
-
-3. Install the IBM Cloud Block Storage plug-in. When you install the plug-in, pre-defined block storage classes are added to your cluster.
-
-   ```text
-   helm install v2.0.2 iks-charts/ibmcloud-block-storage-plugin -n kube-system
-   ```
-
-   Example output:
-
-   ```text
-   NAME: v2.0.2
-   LAST DEPLOYED: Fri Feb  5 12:29:50 2021
-   NAMESPACE: kube-system
-   STATUS: deployed
-   REVISION: 1
-   NOTES:
-   Thank you for installing: ibmcloud-block-storage-plugin. Your release is named: v2.0.2
-   ...
-   ```
-
-4. Verify that all block storage plugin pods are in a "Running" state.
-
-   ```text
-   kubectl get pods -n kube-system | grep ibmcloud-block-storage
-   ```
-
-5. Verify that the storage classes for Block Storage were added to your cluster.
-
-   ```text
-   kubectl get storageclasses | grep ibmc-block
-   ```
-
-6. Set the Block Storage as the default storage class.
-
-   ```text
-   kubectl patch storageclass ibmc-block-gold -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'
-   kubectl patch storageclass ibmc-file-gold -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"false"}}}'
-
-   # Check the default storageclass is block storage
-   kubectl get storageclass | grep \(default\)
-   ```
-
-   Example output:
-
-   ```text
-   ibmc-block-gold (default)   ibm.io/ibmc-block   65s
-   ```
-
-### Security Context Constraint Setup \(OpenShift only\)
-
-By default, in OpenShift, all pods or containers will use the [Restricted SCC](https://docs.openshift.com/container-platform/4.6/authentication/managing-security-context-constraints.html), which limits the UIDs pods can run with, causing the Feast installation to fail. To overcome this, you can allow Feast pods to run with any UID by executing the following:
-
-```text
-oc adm policy add-scc-to-user anyuid -z default,kf-feast-kafka -n feast
-```
-
-## 3. Installation
-
-Install Feast using Kustomize. The pods may take a few minutes to initialize.
-
-```bash
-git clone https://github.com/kubeflow/manifests
-cd manifests/contrib/feast/
-kustomize build feast/base | kubectl apply -n feast -f -
-```
-
-### Optional: Enable Feast Jupyter and Kafka
-
-You may optionally enable the Feast Jupyter component, which contains code examples to demonstrate Feast. Some examples require Kafka to stream real-time features to Feast Online Serving. To enable them, edit the following properties in the `values.yaml` under the `manifests/contrib/feast` folder:
-
-```text
-kafka.enabled: true
-feast-jupyter.enabled: true
-```
-
-Then regenerate the resource manifests and deploy:
-
-```text
-make feast/base
-kustomize build feast/base | kubectl apply -n feast -f -
-```
-
-## 4. Use Feast Jupyter Notebook Server to connect to Feast
-
-After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster:
-
-```bash
-kubectl port-forward \
-$(kubectl get pod -l app=feast-jupyter -o custom-columns=:metadata.name) 8888:8888 -n feast
-```
-
-```text
-Forwarding from 127.0.0.1:8888 -> 8888
-Forwarding from [::1]:8888 -> 8888
-```
-
-You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook.
-
-{% embed url="http://localhost:8888/tree?" caption="" %}
-
-## 5. Uninstall Feast
-
-```text
-kustomize build feast/base | kubectl delete -n feast -f -
-```
-
-## 6. Troubleshooting
-
-When running the minimal\_ride\_hailing\_example Jupyter Notebook example, the following errors may occur:
-
-1. When running `job = client.get_historical_features(...)`:
-
-   ```text
-   KeyError: 'historical_feature_output_location'
-   ```
-
-   or
-
-   ```text
-   KeyError: 'spark_staging_location'
-   ```
-
-   Add the following environment variables:
-
-   ```text
-   os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION"] = "file:///home/jovyan/historical_feature_output"
-   os.environ["FEAST_SPARK_STAGING_LOCATION"] = "file:///home/jovyan/test_data"
-   ```
-
-2. When running `job.get_status()`:
-
-   ```text
-
-   ```
-
-   Add the following environment variable:
-
-   ```text
-   os.environ["FEAST_REDIS_HOST"] = "feast-release-redis-master"
-   ```
-
-3. When running `job = client.start_stream_to_online_ingestion(...)`:
-
-   ```text
-   org.apache.kafka.vendor.common.KafkaException: Failed to construct kafka consumer
-   ```
-
-   Add the following environment variable:
-
-   ```text
-   os.environ["DEMO_KAFKA_BROKERS"] = "feast-release-kafka:9092"
-   ```
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md b/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md
deleted file mode 100644
index d03d7fb863..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md
+++ /dev/null
@@ -1,68 +0,0 @@
-# Amazon EKS \(with Terraform\)
-
-### Overview
-
-This guide installs Feast on AWS using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/aws).
-
-{% hint style="info" %}
-The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your AWS account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production.
-{% endhint %}
-
-This Terraform configuration creates the following resources:
-
-* Kubernetes cluster on Amazon EKS \(3x r3.large nodes\)
-* Kafka managed by Amazon MSK \(2x kafka.t3.small nodes\)
-* Postgres database for Feast metadata, using serverless Aurora \(min capacity: 2\)
-* Redis cluster, using Amazon ElastiCache \(1x cache.t2.micro\)
-* Amazon EMR cluster to run Spark \(3x spot m4.xlarge\)
-* Staging S3 bucket to store temporary data
-
-![](../../../.gitbook/assets/feast-on-aws-3-%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%283%29.png)
-
-### 1. Requirements
-
-* Create an AWS account and [configure credentials locally](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html)
-* Install [Terraform](https://www.terraform.io/) >= 0.12 \(tested with 0.13.3\)
-* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.3.4\)
-
-### 2. Configure Terraform
-
-Create a `.tfvars` file under `feast/infra/terraform/aws`; in our example, we name it `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. At a minimum, you need to set `name_prefix` and an AWS region:
-
-{% code title="my\_feast.tfvars" %}
-```typescript
-name_prefix = "my-feast"
-region      = "us-east-1"
-```
-{% endcode %}
-
-### 3. Apply
-
-After completing the configuration, initialize Terraform and apply:
-
-```bash
-$ cd feast/infra/terraform/aws
-$ terraform init
-$ terraform apply -var-file=my_feast.tfvars
-```
-
-Startup may take a minute. A kubectl configuration file is also created in this directory, and the file's name will start with `kubeconfig_` and end with a random suffix.
-
-### 4. Connect to Feast using Jupyter
-
-After all pods are running, connect to the Jupyter Notebook Server running in the cluster.
-
-To connect to the remote Feast server you just created, forward a port from the remote k8s cluster to your local machine. Replace `kubeconfig_XXXXXXX` below with the kubeconfig file name Terraform generates for you.
-
-```bash
-KUBECONFIG=kubeconfig_XXXXXXX kubectl port-forward \
-$(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888
-```
-
-```text
-Forwarding from 127.0.0.1:8888 -> 8888
-Forwarding from [::1]:8888 -> 8888
-```
-
-You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook.
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-helm.md b/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-helm.md
deleted file mode 100644
index 39dcdbd700..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-helm.md
+++ /dev/null
@@ -1,139 +0,0 @@
-# Azure AKS \(with Helm\)
-
-## Overview
-
-This guide installs Feast on an Azure Kubernetes cluster \(known as AKS\), and ensures the following services are running:
-
-* Feast Core
-* Feast Online Serving
-* Postgres
-* Redis
-* Spark
-* Kafka
-* Feast Jupyter \(Optional\)
-* Prometheus \(Optional\)
-
-## 1. Requirements
-
-1. Install and configure [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli)
-2. Install and configure [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
-3. Install [Helm 3](https://helm.sh/)
-
-## 2. Preparation
-Create an AKS cluster with Azure CLI. The detailed steps can be found [here](https://docs.microsoft.com/en-us/azure/aks/kubernetes-walkthrough), and a high-level walkthrough includes:
-
-```bash
-az group create --name myResourceGroup --location eastus
-az acr create --resource-group myResourceGroup --name feast-AKS-ACR --sku Basic
-az aks create -g myResourceGroup -n feast-AKS --location eastus --attach-acr feast-AKS-ACR --generate-ssh-keys
-
-az aks install-cli
-az aks get-credentials --resource-group myResourceGroup --name feast-AKS
-```
-
-Add the Feast Helm repository and download the latest charts:
-
-```bash
-helm version # make sure you have the latest Helm installed
-helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
-helm repo update
-```
-
-Feast includes a Helm chart that installs all necessary components to run Feast Core, Feast Online Serving, and an example Jupyter notebook.
-
-Feast Core requires Postgres to run, which requires a secret to be set on Kubernetes:
-
-```bash
-kubectl create secret generic feast-postgresql --from-literal=postgresql-password=password
-```
-
-## 3. Feast installation
-
-Install Feast using Helm. The pods may take a few minutes to initialize.
-
-```bash
-helm install feast-release feast-charts/feast
-```
-
-## 4. Spark operator installation
-
-Follow the documentation [to install the Spark operator on Kubernetes](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator), and the Feast documentation to [configure Spark roles](../../reference-1/feast-and-spark.md):
-
-```bash
-helm repo add spark-operator https://googlecloudplatform.github.io/spark-on-k8s-operator
-helm install my-release spark-operator/spark-operator --set serviceAccounts.spark.name=spark --set image.tag=v1beta2-1.1.2-2.4.5
-```
-
-and ensure the service account used by Feast has permissions to manage Spark Application resources. This depends on your k8s setup, but typically you'd need to configure a Role and a RoleBinding like the one below:
-
-```text
-cat <<EOF | kubectl apply -f -
-apiVersion: rbac.authorization.k8s.io/v1beta1
-kind: Role
-metadata:
-  name: use-spark-operator
-  namespace: <namespace>
-rules:
-- apiGroups: ["sparkoperator.k8s.io"]
-  resources: ["sparkapplications"]
-  verbs: ["create", "delete", "deletecollection", "get", "list", "update", "watch", "patch"]
----
-apiVersion: rbac.authorization.k8s.io/v1beta1
-kind: RoleBinding
-metadata:
-  name: use-spark-operator
-  namespace: <namespace>
-roleRef:
-  kind: Role
-  name: use-spark-operator
-  apiGroup: rbac.authorization.k8s.io
-subjects:
-  - kind: ServiceAccount
-    name: default
-EOF
-```
-
-## 5. Use Jupyter to connect to Feast
-
-After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster:
-
-```bash
-kubectl port-forward \
-$(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888
-```
-
-```text
-Forwarding from 127.0.0.1:8888 -> 8888
-Forwarding from [::1]:8888 -> 8888
-```
-
-You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook.
-
-{% embed url="http://localhost:8888/tree?" caption="" %}
-
-## 6. Environment variables
-
-If you are running the [Minimal Ride Hailing Example](https://github.com/feast-dev/feast/blob/master/examples/minimal/minimal_ride_hailing.ipynb), you may want to make sure the following environment variables are correctly set \(angle-bracketed values are placeholders to fill in\):
-
-```text
-demo_data_location = "wasbs://<container>@<account>.blob.core.windows.net/"
-os.environ["FEAST_AZURE_BLOB_ACCOUNT_NAME"] = "<account name>"
-os.environ["FEAST_AZURE_BLOB_ACCOUNT_ACCESS_KEY"] = "<access key>"
-os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION"] = "wasbs://<container>@<account>.blob.core.windows.net/out/"
-os.environ["FEAST_SPARK_STAGING_LOCATION"] = "wasbs://<container>@<account>.blob.core.windows.net/artifacts/"
-os.environ["FEAST_SPARK_LAUNCHER"] = "k8s"
-os.environ["FEAST_SPARK_K8S_NAMESPACE"] = "default"
-os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_FORMAT"] = "parquet"
-os.environ["FEAST_REDIS_HOST"] = "feast-release-redis-master.default.svc.cluster.local"
-os.environ["DEMO_KAFKA_BROKERS"] = "feast-release-kafka.default.svc.cluster.local:9092"
-```
-
-## 7. Further Reading
-
-* [Feast Concepts](../../concepts/overview.md)
-* [Feast Examples/Tutorials](https://github.com/feast-dev/feast/tree/master/examples)
-* [Feast Helm Chart Documentation](https://github.com/feast-dev/feast/blob/master/infra/charts/feast/README.md)
-* [Configuring Feast components](../../reference-1/configuration-reference.md)
-* [Feast and Spark](../../reference-1/feast-and-spark.md)
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md b/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md
deleted file mode 100644
index 71dd15908d..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md
+++ /dev/null
@@ -1,63 +0,0 @@
-# Azure AKS \(with Terraform\)
-
-## Overview
-
-This guide installs Feast on Azure using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/azure).
-
-{% hint style="info" %}
-The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your Azure account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production.
-{% endhint %}
-
-This Terraform configuration creates the following resources:
-
-* Kubernetes cluster on Azure AKS
-* Kafka managed by HDInsight
-* Postgres database for Feast metadata, running as a pod on AKS
-* Redis cluster, using Azure Cache for Redis
-* [spark-on-k8s-operator](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator) to run Spark
-* Staging Azure blob storage container to store temporary data
-
-## 1. Requirements
-
-* Create an Azure account and [configure credentials locally](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli)
-* Install [Terraform](https://www.terraform.io/) \(tested with 0.13.5\)
-* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.4.2\)
-
-## 2. Configure Terraform
-
-Create a `.tfvars` file under `feast/infra/terraform/azure`; in our example, we name it `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. At a minimum, you need to set `name_prefix` and `resource_group`:
-
-{% code title="my\_feast.tfvars" %}
-```typescript
-name_prefix    = "feast"
-resource_group = "Feast" # pre-existing resource group
-```
-{% endcode %}
-
-## 3. Apply
-
-After completing the configuration, initialize Terraform and apply:
-
-```bash
-$ cd feast/infra/terraform/azure
-$ terraform init
-$ terraform apply -var-file=my_feast.tfvars
-```
-
-## 4. Connect to Feast using Jupyter
-
-After all pods are running, connect to the Jupyter Notebook Server running in the cluster.
-
-To connect to the remote Feast server you just created, forward a port from the remote k8s cluster to your local machine.
-
-```bash
-kubectl port-forward $(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888
-```
-
-```text
-Forwarding from 127.0.0.1:8888 -> 8888
-Forwarding from [::1]:8888 -> 8888
-```
-
-You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook.
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-with-helm.md b/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-with-helm.md
deleted file mode 100644
index 032554d120..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/kubernetes-with-helm.md
+++ /dev/null
@@ -1,69 +0,0 @@
-# Kubernetes \(with Helm\)
-
-## Overview
-
-This guide installs Feast on an existing Kubernetes cluster, and ensures the following services are running:
-
-* Feast Core
-* Feast Online Serving
-* Postgres
-* Redis
-* Feast Jupyter \(Optional\)
-* Prometheus \(Optional\)
-
-## 1. Requirements
-
-1. Install and configure [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
-2. Install [Helm 3](https://helm.sh/)
-
-## 2. Preparation
-
-Add the Feast Helm repository and download the latest charts:
-
-```text
-helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
-helm repo update
-```
-
-Feast includes a Helm chart that installs all necessary components to run Feast Core, Feast Online Serving, and an example Jupyter notebook.
-
-Feast Core requires Postgres to run, which requires a secret to be set on Kubernetes:
-
-```bash
-kubectl create secret generic feast-postgresql --from-literal=postgresql-password=password
-```
-
-## 3. Installation
-
-Install Feast using Helm. The pods may take a few minutes to initialize.
-
-```bash
-helm install feast-release feast-charts/feast
-```
-
-## 4. Use Jupyter to connect to Feast
-
-After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster:
-
-```bash
-kubectl port-forward \
-$(kubectl get pod -l app=feast-jupyter -o custom-columns=:metadata.name) 8888:8888
-```
-
-```text
-Forwarding from 127.0.0.1:8888 -> 8888
-Forwarding from [::1]:8888 -> 8888
-```
-
-You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook.
-
-{% embed url="http://localhost:8888/tree?" caption="" %}
-
-## 5. Further Reading
-
-* [Feast Concepts](../../concepts/overview.md)
-* [Feast Examples/Tutorials](https://github.com/feast-dev/feast/tree/master/examples)
-* [Feast Helm Chart Documentation](https://github.com/feast-dev/feast/blob/master/infra/charts/feast/README.md)
-* [Configuring Feast components](../../reference-1/configuration-reference.md)
-* [Feast and Spark](../../reference-1/feast-and-spark.md)
-
diff --git a/docs/feast-on-kubernetes/getting-started/install-feast/quickstart.md b/docs/feast-on-kubernetes/getting-started/install-feast/quickstart.md
deleted file mode 100644
index b5e50d193c..0000000000
--- a/docs/feast-on-kubernetes/getting-started/install-feast/quickstart.md
+++ /dev/null
@@ -1,91 +0,0 @@
-# Docker Compose
-
-{% hint style="success" %}
-This guide is meant for exploratory purposes only. It allows users to run Feast locally using Docker Compose instead of Kubernetes. The goal of this guide is for users to be able to quickly try out the full Feast stack without needing to deploy to Kubernetes. It is not meant for production use.
-{% endhint %}
-
-## Overview
-
-This guide shows you how to deploy Feast using [Docker Compose](https://docs.docker.com/get-started/). Docker Compose allows you to explore the functionality provided by Feast while requiring only minimal infrastructure.
-
-This guide includes the following containerized components:
-
-* [A complete Feast deployment](../../concepts/architecture.md)
-  * Feast Core with Postgres
-  * Feast Online Serving with Redis
-  * Feast Job Service
-* A Jupyter Notebook Server with built-in Feast example\(s\). For demo purposes only.
-* A Kafka cluster for testing streaming ingestion. For demo purposes only.
-
-## Get Feast
-
-Clone the latest stable version of Feast from the [Feast repository](https://github.com/feast-dev/feast/):
-
-```text
-git clone https://github.com/feast-dev/feast.git
-cd feast/infra/docker-compose
-```
-
-Create a new configuration file:
-
-```text
-cp .env.sample .env
-```
-
-## Start Feast
-
-Start Feast with Docker Compose:
-
-```text
-docker-compose pull && docker-compose up -d
-```
-
-Wait until all containers are in a running state:
-
-```text
-docker-compose ps
-```
-
-## Try our example\(s\)
-
-You can now connect to the bundled Jupyter Notebook Server running at `localhost:8888` and follow the example Jupyter notebook.
-
-{% embed url="http://localhost:8888/tree?" caption="" %}
-
-## Troubleshooting
-
-### Open ports
-
-Please ensure that the following ports are available on your host machine:
-
-* `6565`
-* `6566`
-* `8888`
-* `9094`
-* `5432`
-
-If a port conflict cannot be resolved, you can modify the port mappings in the provided [docker-compose.yml](https://github.com/feast-dev/feast/tree/master/infra/docker-compose) file to use different ports on the host.
-
-### Containers are restarting or unavailable
-
-If some of the containers continue to restart, or you are unable to access a service, inspect the logs using the following command:
-
-```bash
-docker-compose logs -f -t
-```
-
-If you are unable to resolve the problem, visit [GitHub](https://github.com/feast-dev/feast/issues) to create an issue.
-
-## Configuration
-
-The Feast Docker Compose setup can be configured by modifying properties in your `.env` file.
-
-### Accessing Google Cloud Storage \(GCP\)
-
-To access Google Cloud Storage as a data source, the Docker Compose installation requires access to a GCP service account.
-
-* Create a new [service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) and save a JSON key.
-* Grant the service account access to your bucket\(s\).
-* Copy the service account JSON key to the path you have configured in `.env` under `GCP_SERVICE_ACCOUNT`.
-* Restart your Docker Compose setup of Feast.
-
diff --git a/docs/feast-on-kubernetes/getting-started/learn-feast.md b/docs/feast-on-kubernetes/getting-started/learn-feast.md
deleted file mode 100644
index 983799ca9b..0000000000
--- a/docs/feast-on-kubernetes/getting-started/learn-feast.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Learn Feast
-
-Explore the following resources to learn more about Feast:
-
-* [Concepts](../../) describes all important Feast API concepts.
-* [User guide](../user-guide/define-and-ingest-features.md) provides guidance on completing Feast workflows.
-* [Examples](https://github.com/feast-dev/feast/tree/master/examples) contains Jupyter notebooks that you can run on your Feast deployment.
-* [Advanced](../advanced-1/troubleshooting.md) contains information about both advanced and operational aspects of Feast.
-* [Reference](../reference-1/api/) contains detailed API and design documents for advanced users.
-* [Contributing](../../contributing/contributing.md) contains resources for anyone who wants to contribute to Feast.
-
-{% hint style="info" %}
-The best way to learn Feast is to use it. Jump over to our [Quickstart](install-feast/quickstart.md) guide to have one of our examples running in no time at all!
-{% endhint %}
-
diff --git a/docs/feast-on-kubernetes/reference-1/README.md b/docs/feast-on-kubernetes/reference-1/README.md
deleted file mode 100644
index 02577ad8e3..0000000000
--- a/docs/feast-on-kubernetes/reference-1/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Reference
-
diff --git a/docs/feast-on-kubernetes/reference-1/api/README.md b/docs/feast-on-kubernetes/reference-1/api/README.md
deleted file mode 100644
index cd75f5bf88..0000000000
--- a/docs/feast-on-kubernetes/reference-1/api/README.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# API Reference
-
-Please see the following API specific reference documentation:
-
-* [Feast Core gRPC API](https://api.docs.feast.dev/grpc/feast/core/coreservice.pb.html): This is the gRPC API used by Feast Core. This API contains RPCs for creating and managing feature sets, stores, projects, and jobs.
-* [Feast Serving gRPC API](https://api.docs.feast.dev/grpc/feast/serving/servingservice.pb.html): This is the gRPC API used by Feast Serving. It contains RPCs used for the retrieval of online feature data or historical feature data.
-* [Feast gRPC Types](https://api.docs.feast.dev/grpc/feast/types/value.pb): These are the gRPC types used by Feast Core, Feast Serving, and the Go, Java, and Python clients.
-* [Go Client SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go): The Go library used for the retrieval of online features from Feast.
-* [Java Client SDK](https://javadoc.io/doc/dev.feast/feast-sdk): The Java library used for the retrieval of online features from Feast.
-* [Python SDK](https://api.docs.feast.dev/python/): This is the complete reference to the Feast Python SDK. The SDK is used to manage feature sets, features, jobs, projects, and entities. It can also be used to retrieve training datasets or online features from Feast Serving.
-
-## Community Contributions
-
-The following community provided SDKs are available:
-
-* [Node.js SDK](https://github.com/MichaelHirn/feast-client/): A Node.js SDK written in TypeScript. The SDK can be used to manage feature sets, features, jobs, projects, and entities.
-
diff --git a/docs/feast-on-kubernetes/reference-1/configuration-reference.md b/docs/feast-on-kubernetes/reference-1/configuration-reference.md
deleted file mode 100644
index 6f9a97dabf..0000000000
--- a/docs/feast-on-kubernetes/reference-1/configuration-reference.md
+++ /dev/null
@@ -1,132 +0,0 @@
-# Configuration Reference
-
-## Overview
-
-This reference describes how to configure Feast components:
-
-* [Feast Core and Feast Online Serving](configuration-reference.md#1-feast-core-and-feast-online-serving)
-* [Feast CLI and Feast Python SDK](configuration-reference.md#2-feast-cli-and-feast-python-sdk)
-* [Feast Go and Feast Java SDK](configuration-reference.md#3-feast-java-and-go-sdk)
-
-## 1. Feast Core and Feast Online Serving
-
-Available configuration properties for Feast Core and Feast Online Serving can be referenced from the corresponding `application.yml` of each component:
-
-| Component | Configuration Reference |
-| :--- | :--- |
-| Core | [core/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/core/src/main/resources/application.yml) |
-| Serving \(Online\) | [serving/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml) |
-
-Configuration properties for Feast Core and Feast Online Serving are defined depending on how Feast is deployed:
-
-* [Docker Compose deployment](configuration-reference.md#docker-compose-deployment) - Feast is deployed with Docker Compose.
-* [Kubernetes deployment](configuration-reference.md#kubernetes-deployment) - Feast is deployed with Kubernetes.
-* [Direct Configuration](configuration-reference.md#direct-configuration) - Feast is built and run from source code.
-
-## Docker Compose Deployment
-
-For each Feast component deployed using Docker Compose, configuration properties from `application.yml` can be set at:
-
-| Component | Configuration Path |
-| :--- | :--- |
-| Core | `infra/docker-compose/core/core.yml` |
-| Online Serving | `infra/docker-compose/serving/online-serving.yml` |
-
-## Kubernetes Deployment
-
-The Kubernetes Feast Deployment is configured using `values.yaml` in the [Helm chart](https://github.com/feast-dev/feast-helm-charts) included with Feast:
-
-```yaml
-# values.yaml
-feast-core:
-  enabled: true # whether to deploy the feast-core subchart to deploy Feast Core.
-  # feast-core subchart specific config.
-  gcpServiceAccount:
-    enabled: true
-  # ....
-```
-
-A reference of the sub-chart-specific configuration can be found in its `values.yml`:
-
-* [feast-core](https://github.com/feast-dev/feast-java/tree/master/infra/charts/feast-core)
-* [feast-serving](https://github.com/feast-dev/feast-java/tree/master/infra/charts/feast-serving)
-
-Configuration properties can be set via `application-override.yaml` for each component in `values.yaml`:
-
-```yaml
-# values.yaml
-feast-core:
-  # ....
-  application-override.yaml:
-    # application.yml config properties for Feast Core.
-    # ...
-```
-
-Visit the [Helm chart](https://github.com/feast-dev/feast-helm-charts) included with Feast to learn more about configuration.
- -## Direct Configuration - -If Feast is built and running from source, configuration properties can be set directly in the Feast component's `application.yml`: - -| Component | Configuration Path | -| :--- | :--- | -| Core | [core/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/core/src/main/resources/application.yml) | -| Serving \(Online\) | [serving/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml) | - -## 2. Feast CLI and Feast Python SDK - -Configuration options for both the [Feast CLI](../getting-started/connect-to-feast/feast-cli.md) and [Feast Python SDK](https://api.docs.feast.dev/python/) can be defined in the following locations, in order of precedence: - -**1. Command line arguments or initialized arguments:** Passing parameters to the Feast CLI or instantiating the Feast Client object with specific parameters will take precedence over other parameters. - -```bash -# Set option as command line arguments. -feast config set core_url "localhost:6565" -``` - -```python -# Pass options as initialized arguments. -client = Client( - core_url="localhost:6565", - project="default" -) -``` - -**2. Environment variables:** Environment variables can be set to provide configuration options. They must be prefixed with `FEAST_`. For example `FEAST_CORE_URL`. - -```bash -FEAST_CORE_URL=my_feast:6565 FEAST_PROJECT=default feast projects list -``` - -**3. Configuration file:** Options with the lowest precedence are configured in the Feast configuration file. Feast looks for or creates this configuration file in `~/.feast/config` if it does not already exist. All options must be defined in the `[general]` section of this file. - -```text -[general] -project = default -core_url = localhost:6565 -``` - -Visit the [available configuration parameters](https://api.docs.feast.dev/python/#module-feast.constants) for Feast Python SDK and Feast CLI to learn more. - -## 3. Feast Java and Go SDK - -The [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) and [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) are configured via arguments passed when instantiating the respective Clients: - -### Go SDK - -```go -// configure serving host and port. -cli := feast.NewGrpcClient("localhost", 6566) -``` - -Visit the [Feast Go SDK API reference](https://godoc.org/github.com/feast-dev/feast/sdk/go) to learn more about available configuration parameters. - -### Java SDK - -```java -// configure serving host and port. -client = FeastClient.create(servingHost, servingPort); -``` - -Visit the [Feast Java SDK API reference](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) to learn more about available configuration parameters. -
 diff --git a/docs/feast-on-kubernetes/reference-1/feast-and-spark.md b/docs/feast-on-kubernetes/reference-1/feast-and-spark.md deleted file mode 100644 index be05f177ae..0000000000 --- a/docs/feast-on-kubernetes/reference-1/feast-and-spark.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -description: Configuring Feast to use Spark for ingestion. ---- - -# Feast and Spark - -Feast relies on Spark to ingest data from the offline store into the online store, to run streaming ingestion, and to run queries that retrieve historical data from the offline store. Feast supports several Spark deployment options. - -## Option 1.
Use Kubernetes Operator for Apache Spark - -To install the Spark on K8s Operator: - -```bash -helm repo add spark-operator \ - https://googlecloudplatform.github.io/spark-on-k8s-operator - -helm install my-release spark-operator/spark-operator \ - --set serviceAccounts.spark.name=spark -``` - -Currently, Feast is tested with version `v1beta2-1.1.2-2.4.5` of the operator image. To configure Feast to use it, set the following options in Feast config: - -| Feast Setting | Value | -| :--- | :--- | -| `SPARK_LAUNCHER` | `"k8s"` | -| `SPARK_STAGING_LOCATION` | S3/GCS/Azure Blob Storage URL to use as a staging location, must be readable and writable by Feast. For S3, use `s3a://` prefix here. Ex.: `s3a://some-bucket/some-prefix/artifacts/` | -| `HISTORICAL_FEATURE_OUTPUT_LOCATION` | S3/GCS/Azure Blob Storage URL used to store results of historical retrieval queries, must be readable and writable by Feast. For S3, use `s3a://` prefix here. Ex.: `s3a://some-bucket/some-prefix/out/` | -| `SPARK_K8S_NAMESPACE` | Only needs to be set if you are customizing the spark-on-k8s-operator. The name of the Kubernetes namespace to run Spark jobs in. This should match the value of `sparkJobNamespace` set on the spark-on-k8s-operator Helm chart. Typically this is also the namespace Feast itself will run in. | -| `SPARK_K8S_JOB_TEMPLATE_PATH` | Only needs to be set if you are customizing the Spark job template. Local file path with the template of the SparkApplication resource. No prefix required. Ex.: `/home/jovyan/work/sparkapp-template.yaml`. An example template is [here](https://github.com/feast-dev/feast/blob/4059a21dc4eba9cd27b2d5b0fabe476c07a8b3bd/sdk/python/feast/pyspark/launchers/k8s/k8s_utils.py#L280-L317) and the spec is defined in the [k8s-operator User Guide](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/master/docs/user-guide.md). | - -Lastly, make sure that the service account used by Feast has permissions to manage Spark Application resources. This depends on your k8s setup, but typically you'd need to configure a Role and a RoleBinding like the one below: - -```text -cat <
-```
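For illustration, these settings can also be supplied straight from Python, since the SDK `Client` accepts configuration options as keyword arguments in the same way as `core_url` elsewhere in this reference. A hedged sketch — the assumption here is that the lowercase option names mirror the settings in the table above, and the bucket URLs are placeholders:

```python
from feast import Client

# Sketch: configure the k8s Spark launcher via SDK options (placeholder URLs).
client = Client(
    core_url="localhost:6565",
    spark_launcher="k8s",
    spark_staging_location="s3a://some-bucket/some-prefix/artifacts/",
    historical_feature_output_location="s3a://some-bucket/some-prefix/out/",
)
```

The same options can instead be provided as environment variables by prefixing them with `FEAST_` (e.g. `FEAST_SPARK_LAUNCHER=k8s`), as described in the configuration reference above.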

-| Limitation | Motivation |
-| :--- | :--- |
-| Feature names and entity names cannot overlap in feature table definitions | Features and entities become columns in historical stores which may cause conflicts |
-| The following field names are reserved in feature tables | These keywords are used for column names when persisting metadata in historical stores |
-
-### Ingestion - -| Limitation | Motivation | -| :--- | :--- | -| Once data has been ingested into Feast, there is currently no way to delete the data without manually going to the database and deleting it. However, during retrieval only the latest rows will be returned for a specific key \(`event_timestamp`, `entity`\) based on its `created_timestamp`. | This functionality simply doesn't exist yet as a Feast API | - -### Storage - -| Limitation | Motivation | -| :--- | :--- | -| Feast does not support offline storage in Feast 0.8 | As part of our re-architecture of Feast, we moved from GCP to cloud-agnostic deployments. Developing offline storage support that is available in all cloud environments is a pending action. | -
 diff --git a/docs/feast-on-kubernetes/reference-1/metrics-reference.md b/docs/feast-on-kubernetes/reference-1/metrics-reference.md deleted file mode 100644 index 78f94bc390..0000000000 --- a/docs/feast-on-kubernetes/reference-1/metrics-reference.md +++ /dev/null @@ -1,178 +0,0 @@ -# Metrics Reference - -{% hint style="warning" %} -This page applies to Feast 0.7. The content may be out of date for Feast 0.8+ -{% endhint %} - -Reference of the metrics that each Feast component exports: - -* [Feast Core](metrics-reference.md#feast-core) -* [Feast Serving](metrics-reference.md#feast-serving) -* [Feast Ingestion Job](metrics-reference.md#feast-ingestion-job) - -To configure Feast to export metrics, see the [Metrics user guide](../advanced-1/metrics.md). - -## Feast Core - -**Exported Metrics** - -Feast Core exports the following metrics: - -| Metric | Description | Tags | -| :--- | :--- | :--- | -| `feast_core_request_latency_seconds` | Feast Core's latency in serving requests, in seconds. | `service`, `method`, `status_code` | -| `feast_core_feature_set_total` | No. of Feature Sets registered with Feast Core. | None | -| `feast_core_store_total` | No. of Stores registered with Feast Core. | None | -| `feast_core_max_memory_bytes` | Max amount of memory the Java virtual machine will attempt to use. | None | -| `feast_core_total_memory_bytes` | Total amount of memory in the Java virtual machine. | None | -| `feast_core_free_memory_bytes` | Total amount of free memory in the Java virtual machine. | None | -| `feast_core_gc_collection_seconds` | Time spent in a given JVM garbage collector in seconds. | None | - -**Metric Tags** - -Exported Feast Core metrics may be filtered by the following tags/keys: - -| Tag | Description | -| :--- | :--- | -| `service` | Name of the Service that the request is made to. Should be set to `CoreService`. | -| `method` | Name of the Method that the request is calling \(i.e. `ListFeatureSets`\). | -| `status_code` | Status code returned as a result of handling the request \(i.e. `OK`\). Can be used to find request failures. | - -## Feast Serving - -**Exported Metrics** - -Feast Serving exports the following metrics: - -| Metric | Description | Tags | -| :--- | :--- | :--- | -| `feast_serving_request_latency_seconds` | Feast Serving's latency in serving requests, in seconds. | `method` | -| `feast_serving_request_feature_count` | No. of requests retrieving a Feature from Feast Serving. | `project`, `feature_name` | -| `feast_serving_not_found_feature_count` | No.
of requests retrieving a Feature that resulted in a [`NOT_FOUND` field status](../user-guide/getting-training-features.md#online-field-statuses). | `project`, `feature_name` | -| `feast_serving_stale_feature_count` | No. of requests retrieving a Feature that resulted in an [`OUTSIDE_MAX_AGE` field status](../user-guide/getting-training-features.md#online-field-statuses). | `project`, `feature_name` | -| `feast_serving_grpc_request_count` | Total gRPC requests served. | `method` | - -**Metric Tags** - -Exported Feast Serving metrics may be filtered by the following tags/keys: - -| Tag | Description | -| :--- | :--- | -| `method` | Name of the Method that the request is calling \(i.e. `ListFeatureSets`\). | -| `status_code` | Status code returned as a result of handling the request \(i.e. `OK`\). Can be used to find request failures. | -| `project` | Name of the project that the FeatureSet of the Feature retrieved belongs to. | -| `feature_name` | Name of the Feature being retrieved. | - -## Feast Ingestion Job - -Feast Ingestion computes both metrics and statistics on [data ingestion](../user-guide/define-and-ingest-features.md). Make sure you are familiar with data ingestion concepts before proceeding. - -**Metrics Namespace** - -Metrics are computed at two stages of the Feature Row's/Feature Value's life cycle when being processed by the Ingestion Job: - -* `Inflight` - Prior to writing data to stores, but after successful validation of data. -* `WriteToStoreSuccess` - After a successful store write. - -Metrics processed at each stage will be tagged with `metrics_namespace` set to the stage where the metric was computed. - -**Metrics Bucketing** - -Metrics with a `{BUCKET}` suffix are computed over a 60-second window/bucket. Suffix with the following to select the bucket to use: - -* `min` - minimum value. -* `max` - maximum value. -* `mean` - mean value. -* `percentile_90` - 90th percentile. -* `percentile_95` - 95th percentile. -* `percentile_99` - 99th percentile. - -**Exported Metrics**
-| Metric | Description | Tags |
-| :--- | :--- | :--- |
-| `feast_ingestion_feature_row_lag_ms_{BUCKET}` | Lag time in milliseconds between succeeding ingested Feature Rows. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_lag_ms_{BUCKET}` | Lag time in milliseconds between succeeding ingested values for each Feature. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `feast_feature_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_{BUCKET}` | Last value for each Feature. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `feast_feature_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_row_ingested_count` | No. of ingested Feature Rows. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_missing_count` | No. of times an ingested Feature Row did not provide a value for the Feature. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `feast_feature_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_deadletter_row_count` | No. of Feature Rows that the Ingestion Job did not successfully write to the store. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name` |
- -**Metric Tags** - -Exported Feast Ingestion Job metrics may be filtered by the following tags/keys: - -| Tag | Description | -| :--- | :--- | -| `feast_store` | Name of the target store the Ingestion Job is writing to. | -| `feast_project_name` | Name of the project that the ingested FeatureSet belongs to. | -| `feast_featureSet_name` | Name of the Feature Set being ingested. | -| `feast_feature_name` | Name of the Feature being ingested. | -| `ingestion_job_name` | Name of the Ingestion Job performing data ingestion. Typically this is set to the ID of the Ingestion Job. | -| `metrics_namespace` | Stage where metrics were computed. Either `Inflight` or `WriteToStoreSuccess`. | -
 diff --git a/docs/feast-on-kubernetes/tutorials-1/README.md b/docs/feast-on-kubernetes/tutorials-1/README.md deleted file mode 100644 index 84ce15b788..0000000000 --- a/docs/feast-on-kubernetes/tutorials-1/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Tutorials -
 diff --git a/docs/feast-on-kubernetes/user-guide/README.md b/docs/feast-on-kubernetes/user-guide/README.md deleted file mode 100644 index be02a73372..0000000000 --- a/docs/feast-on-kubernetes/user-guide/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# User guide -
 diff --git a/docs/feast-on-kubernetes/user-guide/define-and-ingest-features.md b/docs/feast-on-kubernetes/user-guide/define-and-ingest-features.md deleted file mode 100644 index 5a7e7288ec..0000000000 --- a/docs/feast-on-kubernetes/user-guide/define-and-ingest-features.md +++ /dev/null @@ -1,52 +0,0 @@ -# Define and ingest features - -In order to retrieve features for both training and serving, Feast requires data to be ingested into its offline and online stores. - -Users are expected to already have either a batch or stream source with data stored in it, ready to be ingested into Feast. Once a feature table \(with the corresponding sources\) has been registered with Feast, it is possible to load data from this source into stores. - -The following depicts an example ingestion flow from a data source to the online store. - -## Batch Source to Online Store - -```python -from feast import Client -from datetime import datetime, timedelta - -client = Client(core_url="localhost:6565") -driver_ft = client.get_feature_table("driver_trips") - -# Initialize date ranges -today = datetime.now() -yesterday = today - timedelta(1) - -# Launches a short-lived job that ingests data over the provided date range.
-client.start_offline_to_online_ingestion( - driver_ft, yesterday, today -) -``` - -## Stream Source to Online Store - -```python -from feast import Client -from datetime import datetime, timedelta - -client = Client(core_url="localhost:6565") -driver_ft = client.get_feature_table("driver_trips") - -# Launches a long-running streaming ingestion job -client.start_stream_to_online_ingestion(driver_ft) -``` - -## Batch Source to Offline Store - -{% hint style="danger" %} -Not supported in Feast 0.8 -{% endhint %} - -## Stream Source to Offline Store - -{% hint style="danger" %} -Not supported in Feast 0.8 -{% endhint %} -
 diff --git a/docs/feast-on-kubernetes/user-guide/extending-feast.md b/docs/feast-on-kubernetes/user-guide/extending-feast.md deleted file mode 100644 index b124e2f948..0000000000 --- a/docs/feast-on-kubernetes/user-guide/extending-feast.md +++ /dev/null @@ -1,106 +0,0 @@ -# Extending Feast - -## Custom OnlineStore - -Feast allows users to create their own OnlineStore implementations, allowing Feast to read and write feature values to stores other than the first-party implementations already in Feast. The interface is found [here](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/online_stores/online_store.py), and consists of four methods that need to be implemented. - -### Update/Teardown methods - -The `update` method should set up any state in the OnlineStore that is required before any data can be ingested into it. This can be things like tables in SQLite, or keyspaces in Cassandra, etc. The update method should be idempotent. Similarly, the `teardown` method should remove any state in the online store. - -```python -def update( - self, - config: RepoConfig, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, -): - ... - -def teardown( - self, - config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], -): - ... - -``` - -### Write/Read methods - -The `online_write_batch` method is responsible for writing data into the online store, and the `online_read` method is responsible for reading data from the online store. - -```python -def online_write_batch( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], -) -> None: - - ... - -def online_read( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - entity_keys: List[EntityKeyProto], - requested_features: Optional[List[str]] = None, -) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - ... -```
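To make the interface concrete, below is a minimal sketch of a toy implementation that keeps everything in a process-local dictionary. The base-class import path follows the module linked above; the class name `_InMemoryOnlineStore` and the flat-dict storage scheme are illustrative assumptions, not part of Feast — a real implementation would add durable storage, value serialization, and error handling.

```python
from datetime import datetime
from typing import Any, Dict, Tuple

from feast.infra.online_stores.online_store import OnlineStore


class _InMemoryOnlineStore(OnlineStore):
    """Illustrative only: keeps the latest feature values in a dict."""

    def __init__(self):
        # Maps (table name, serialized entity key) -> (event timestamp, values)
        self._data: Dict[Tuple[str, bytes], Tuple[datetime, Dict[str, Any]]] = {}

    def update(self, config, tables_to_delete, tables_to_keep,
               entities_to_delete, entities_to_keep, partial):
        # Nothing to provision for a dict; just drop state for removed tables.
        for table in tables_to_delete:
            self._data = {k: v for k, v in self._data.items() if k[0] != table.name}

    def teardown(self, config, tables, entities):
        self._data.clear()

    def online_write_batch(self, config, table, data, progress):
        for entity_key, values, timestamp, _created_ts in data:
            self._data[(table.name, entity_key.SerializeToString())] = (timestamp, values)
            if progress:
                progress(1)

    def online_read(self, config, table, entity_keys, requested_features=None):
        # Return one (timestamp, values) pair per requested entity key.
        # (A production store would also honor requested_features here.)
        return [
            self._data.get((table.name, key.SerializeToString()), (None, None))
            for key in entity_keys
        ]
```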
- -## Custom OfflineStore - -Feast allows users to create their own OfflineStore implementations, allowing Feast to read and write feature values to stores other than the first-party implementations already in Feast. The interface is found [here](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/offline_stores/offline_store.py), and consists of two methods that need to be implemented. - -### Write method - -The `pull_latest_from_table_or_query` method is used to read data from a source for materialization into the online store. - -```python -def pull_latest_from_table_or_query( - data_source: DataSource, - join_key_columns: List[str], - feature_name_columns: List[str], - event_timestamp_column: str, - created_timestamp_column: Optional[str], - start_date: datetime, - end_date: datetime, -) -> pyarrow.Table: - ... - -``` - -### Read method - -The read method is responsible for reading historical features from the OfflineStore. The feature retrieval may be asynchronous, so the read method is expected to return an object that should produce a DataFrame representing the historical features once the feature retrieval job is complete. - -```python -class RetrievalJob: - - @abstractmethod - def to_df(self): - pass - -def get_historical_features( - config: RepoConfig, - feature_views: List[FeatureView], - feature_refs: List[str], - entity_df: Union[pd.DataFrame, str], - registry: Registry, - project: str, -) -> RetrievalJob: - pass - -``` -
 diff --git a/docs/feast-on-kubernetes/user-guide/getting-online-features.md b/docs/feast-on-kubernetes/user-guide/getting-online-features.md deleted file mode 100644 index c16dc08a01..0000000000 --- a/docs/feast-on-kubernetes/user-guide/getting-online-features.md +++ /dev/null @@ -1,54 +0,0 @@ -# Getting online features - -Feast provides an API through which online feature values can be retrieved. This allows teams to look up feature values at low latency in production during model serving, in order to make online predictions. - -{% hint style="info" %} -Online stores only maintain the current state of features, i.e. the latest feature values. No historical data is stored or served. -{% endhint %} - -```python -from feast import Client - -online_client = Client( - core_url="localhost:6565", - serving_url="localhost:6566", -) - -entity_rows = [ - {"driver_id": 1001}, - {"driver_id": 1002}, -] - -# Features in <feature_table>:<feature> format -feature_refs = [ - "driver_trips:average_daily_rides", - "driver_trips:maximum_daily_rides", - "driver_trips:rating", -] - -response = online_client.get_online_features( - feature_refs=feature_refs, # Contains only feature references - entity_rows=entity_rows, # Contains only entities (driver ids) -) - -# Print features in dictionary format -response_dict = response.to_dict() -print(response_dict) -``` - -The online store must be populated through [ingestion jobs](define-and-ingest-features.md#batch-source-to-online-store) prior to being used for online serving. - -Feast Serving provides a [gRPC API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) that is backed by [Redis](https://redis.io/). We have native clients in [Python](https://api.docs.feast.dev/python/), [Go](https://godoc.org/github.com/gojek/feast/sdk/go), and [Java](https://javadoc.io/doc/dev.feast). - -### Online Field Statuses - -Feast also returns status codes when retrieving features from the Feast Serving API. These status codes give useful insight into the quality of data being served. - -| Status | Meaning | -| :--- | :--- | -| NOT\_FOUND | The feature value was not found in the online store. This might mean that no feature value was ingested for this feature. | -| NULL\_VALUE | An entity key was successfully found but no feature values had been set. This status code should not occur during normal operation. | -| OUTSIDE\_MAX\_AGE | The age of the feature row in the online store \(in terms of its event timestamp\) has exceeded the maximum age defined within the feature table. | -| PRESENT | The feature values have been found and are within the maximum age. | -| UNKNOWN | Indicates a system failure. |
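The statuses themselves travel on the underlying gRPC response, and how they are surfaced differs across SDK versions. A version-agnostic pattern is to treat `None` values in the `to_dict()` output as not servable (e.g. `NOT_FOUND` or `OUTSIDE_MAX_AGE`) and substitute a default. A minimal sketch, assuming `to_dict()` returns one list of values per feature reference as in the example above; the default value is illustrative, not something Feast provides:

```python
# Fall back to an illustrative default when a feature value is missing.
DEFAULT_RATING = 3.0  # assumption: a sensible default for this model

response_dict = response.to_dict()
ratings = [
    value if value is not None else DEFAULT_RATING
    for value in response_dict["driver_trips:rating"]
]
```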
 diff --git a/docs/feast-on-kubernetes/user-guide/getting-training-features.md b/docs/feast-on-kubernetes/user-guide/getting-training-features.md deleted file mode 100644 index 8bac2db5a7..0000000000 --- a/docs/feast-on-kubernetes/user-guide/getting-training-features.md +++ /dev/null @@ -1,72 +0,0 @@ -# Getting training features - -Feast provides a historical retrieval interface for exporting feature data in order to train machine learning models. Essentially, users are able to enrich their data with features from any feature tables. - -### Retrieving historical features - -Below is an example of the process required to produce a training dataset: - -```python -# Feature references with target feature -feature_refs = [ - "driver_trips:average_daily_rides", - "driver_trips:maximum_daily_rides", - "driver_trips:rating", - "driver_trips:rating:trip_completed", -] - -# Define entity source -entity_source = FileSource( - "event_timestamp", - ParquetFormat(), - "gs://some-bucket/customer" -) - -# Retrieve historical dataset from Feast. -historical_feature_retrieval_job = client.get_historical_features( - feature_refs=feature_refs, - entity_rows=entity_source -) - -output_file_uri = historical_feature_retrieval_job.get_output_file_uri() -``` - -**1. Define feature references** - -[Feature references]() define the specific features that will be retrieved from Feast. These features can come from multiple feature tables. The only requirement is that the feature tables that make up the feature references have the same entity \(or composite entity\). - -**2. Define an entity dataframe** - -Feast needs to join feature values onto specific entities at specific points in time. Thus, it is necessary to provide an [entity dataframe]() as part of the `get_historical_features` method. In the example above we are defining an entity source. This source is an external file that provides Feast with the entity dataframe. - -**3. Launch historical retrieval job** - -Once the feature references and an entity source are defined, it is possible to call `get_historical_features()`. This method launches a job that extracts features from the sources defined in the provided feature tables, joins them onto the provided entity source, and returns a reference to the training dataset that is produced. - -Please see the [Feast SDK](https://api.docs.feast.dev/python) for more details. - -### Point-in-time Joins - -Feast always joins features onto entity data in a point-in-time correct way. The process can be described through an example. - -In the example below there are two tables \(or dataframes\): - -* The dataframe on the left is the [entity dataframe]() that contains timestamps, entities, and the target variable \(trip\_completed\). This dataframe is provided to Feast through an entity source. -* The dataframe on the right contains driver features. This dataframe is represented in Feast through a feature table and its accompanying data source\(s\). - -The user would like to have the driver features joined onto the entity dataframe to produce a training dataset that contains both the target \(trip\_completed\) and features \(average\_daily\_rides, maximum\_daily\_rides, rating\). This dataset will then be used to train their model. - -![](../../.gitbook/assets/point_in_time_join%20%281%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%282%29.png) - -Feast is able to intelligently join feature data with different timestamps to a single entity dataframe.
It does this through a point-in-time join as follows: -1. Feast loads the entity dataframe and all feature tables \(driver dataframe\) into the same location. This can either be a database or in memory. -2. For each [entity row]() in the [entity dataframe](getting-online-features.md), Feast tries to find feature values in each feature table to join to it. Feast extracts the timestamp and entity key of each row in the entity dataframe and scans backward through the feature table until it finds a matching entity key. -3. If the event timestamp of the matching entity key within the driver feature table is within the maximum age configured for the feature table, then the features at that entity key are joined onto the entity dataframe. If the event timestamp is outside of the maximum age, then only null values are returned. -4. If multiple entity keys are found with the same event timestamp, then they are deduplicated by the created timestamp, with newer values taking precedence. -5. Feast repeats this joining process for all feature tables and returns the resulting dataset. - -{% hint style="info" %} -Point-in-time correct joins attempt to prevent feature leakage by recreating the state of the world at a single point in time, instead of joining features based on exact timestamps only. -{% endhint %} -
 diff --git a/docs/feast-on-kubernetes/user-guide/overview.md b/docs/feast-on-kubernetes/user-guide/overview.md deleted file mode 100644 index 5f36792479..0000000000 --- a/docs/feast-on-kubernetes/user-guide/overview.md +++ /dev/null @@ -1,32 +0,0 @@ -# Overview - -### Using Feast - -Feast development happens through three key workflows: - -1. [Define and load feature data into Feast](define-and-ingest-features.md) -2. [Retrieve historical features for training models](getting-training-features.md) -3. [Retrieve online features for serving models](getting-online-features.md) - -### Defining feature tables and ingesting data into Feast - -Feature creators model the data within their organization into Feast through the definition of [feature tables](../concepts/feature-tables.md) that contain [data sources](../concepts/sources.md). Feature tables are both a schema and a means of identifying data sources for features, and allow Feast to know how to interpret your data, and where to find it. - -After registering a feature table with Feast, users can trigger an ingestion from their data source into Feast. This loads feature values from an upstream data source into Feast stores through ingestion jobs. - -Visit [feature tables](../concepts/feature-tables.md#overview) to learn more about them. - -{% page-ref page="define-and-ingest-features.md" %} - -### Retrieving historical features for training - -In order to generate a training dataset it is necessary to provide both an [entity dataframe]() and feature references through the [Feast SDK](https://api.docs.feast.dev/python/) to retrieve historical features. For historical serving, Feast requires that you provide the entities and timestamps for the corresponding feature data. Feast produces a point-in-time correct dataset using the requested features. These features can be requested from an unlimited number of feature sets. - -{% page-ref page="getting-training-features.md" %} - -### Retrieving online features for online serving - -Online retrieval uses feature references through the [Feast Online Serving API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) to retrieve online features.
Online serving allows for very low latency requests to feature data at very high throughput. - -{% page-ref page="getting-online-features.md" %} -
 diff --git a/docs/feature-views.md b/docs/feature-views.md deleted file mode 100644 index 235b828835..0000000000 --- a/docs/feature-views.md +++ /dev/null @@ -1,2 +0,0 @@ -# Feature Views -
 diff --git a/docs/getting-started/architecture-and-components/README.md b/docs/getting-started/architecture-and-components/README.md new file mode 100644 index 0000000000..c3286b8315 --- /dev/null +++ b/docs/getting-started/architecture-and-components/README.md @@ -0,0 +1,16 @@ +# Architecture + +{% page-ref page="overview.md" %} + +{% page-ref page="feature-repository.md" %} + +{% page-ref page="registry.md" %} + +{% page-ref page="offline-store.md" %} + +{% page-ref page="online-store.md" %} + +{% page-ref page="provider.md" %} + +
 diff --git a/docs/getting-started/architecture-and-components/feature-repository.md b/docs/getting-started/architecture-and-components/feature-repository.md new file mode 100644 index 0000000000..d231600eb8 --- /dev/null +++ b/docs/getting-started/architecture-and-components/feature-repository.md @@ -0,0 +1,27 @@ +# Feature repository + +Feast users manage two important sets of configuration: + +* Configuration about how to run Feast on your infrastructure +* Feature definitions + +With Feast, the above configuration can be written declaratively and stored as code in a central location. This central location is called a feature repository. The feature repository is the declarative source of truth for what the desired state of a feature store should be. + +The Feast CLI uses the feature repository to configure, deploy, and manage your feature store. + -An example structure of a feature repository is shown below: + +```text +$ tree -a +. +├── data +│   └── driver_stats.parquet +├── driver_features.py +├── feature_store.yaml +└── .feastignore + +1 directory, 4 files +``` + +For more details, see the [Feature repository](../../reference/feature-repository/) reference. +
 diff --git a/docs/concepts/offline-store.md b/docs/getting-started/architecture-and-components/offline-store.md similarity index 69% rename from docs/concepts/offline-store.md rename to docs/getting-started/architecture-and-components/offline-store.md index a5f059ba18..96914db9d0 100644 --- a/docs/concepts/offline-store.md +++ b/docs/getting-started/architecture-and-components/offline-store.md @@ -7,9 +7,9 @@ Offline stores are used primarily for two reasons 1. Building training datasets from time-series features. 2. Materializing \(loading\) features from the offline store into an online store in order to serve those features at low latency for prediction. -Offline stores are configured through the [feature\_store.yaml](../reference/offline-stores/). When building training datasets or materializing features into an online store, Feast will use the configured offline store along with the data sources you have defined as part of feature views to execute the necessary data operations. +Offline stores are configured through the [feature\_store.yaml](../../reference/offline-stores/). When building training datasets or materializing features into an online store, Feast will use the configured offline store along with the data sources you have defined as part of feature views to execute the necessary data operations. It is not possible to query all data sources from all offline stores, and only a single offline store can be used at a time.
For example, it is not possible to query a BigQuery table from a `File` offline store, nor is it possible for a `BigQuery` offline store to query files from your local file system. -Please see the [Offline Stores](../reference/offline-stores/) reference for more details on configuring offline stores. +Please see the [Offline Stores](../../reference/offline-stores/) reference for more details on configuring offline stores.
 diff --git a/docs/concepts/online-store.md b/docs/getting-started/architecture-and-components/online-store.md similarity index 75% rename from docs/concepts/online-store.md rename to docs/getting-started/architecture-and-components/online-store.md index 9830701d1e..4f2e44c92c 100644 --- a/docs/concepts/online-store.md +++ b/docs/getting-started/architecture-and-components/online-store.md @@ -6,9 +6,10 @@ The storage schema of features within the online store mirrors that of the data Example batch data source -![](../.gitbook/assets/image%20%286%29.png) +![](../../.gitbook/assets/image%20%286%29.png) Once the above data source is materialized into Feast \(using `feast materialize`\), the feature values will be stored as follows: -![](../.gitbook/assets/image%20%285%29.png) +![](../../.gitbook/assets/image%20%285%29.png) +Features can also be written to the online store via [push sources](https://docs.feast.dev/reference/data-sources/push). \ No newline at end of file
 diff --git a/docs/concepts/architecture-and-components.md b/docs/getting-started/architecture-and-components/overview.md similarity index 53% rename from docs/concepts/architecture-and-components.md rename to docs/getting-started/architecture-and-components/overview.md index 8c72bdf3b6..bf5c12dcc2 100644 --- a/docs/concepts/architecture-and-components.md +++ b/docs/getting-started/architecture-and-components/overview.md @@ -1,33 +1,31 @@ -# Architecture +# Overview -![Feast Architecture Diagram](../.gitbook/assets/image%20%284%29.png) +![Feast Architecture Diagram](<../../.gitbook/assets/image (4).png>) -#### Functionality +## Functionality * **Create Batch Features:** ELT/ETL systems like Spark and SQL are used to transform data in the batch store. -* **Feast Apply:** The user \(or CI\) publishes versioned controlled feature definitions using `feast apply`. This CLI command updates infrastructure and persists definitions in the object store registry. +* **Feast Apply:** The user (or CI) publishes version controlled feature definitions using `feast apply`. This CLI command updates infrastructure and persists definitions in the object store registry. -* **Feast Materialize:** The user \(or scheduler\) executes `feast materialize` which loads features from the offline store into the online store. +* **Feast Materialize:** The user (or scheduler) executes `feast materialize` which loads features from the offline store into the online store. * **Model Training:** A model training pipeline is launched. It uses the Feast Python SDK to retrieve a training dataset and trains a model. * **Get Historical Features:** Feast exports a point-in-time correct training dataset based on the list of features and entity dataframe provided by the model training pipeline. -* **Deploy Model:** The trained model binary \(and list of features\) are deployed into a model serving system. This step is not executed by Feast. +* **Deploy Model:** The trained model binary (and list of features) is deployed into a model serving system. This step is not executed by Feast.
* **Prediction:** A backend system makes a request for a prediction from the model serving service. * **Get Online Features:** The model serving service makes a request to the Feast Online Serving service for online features using a Feast SDK. -#### Components +## Components A complete Feast deployment contains the following components: -* **Feast Online Serving:** Provides low-latency access to feature values stored in the online store. This component is optional. Teams can also read feature values directly from the online store if necessary. -* **Feast Registry**: An object store \(GCS, S3\) based registry used to persist feature definitions that are registered with the feature store. Systems can discover feature data by interacting with the registry through the Feast SDK. +* **Feast Registry**: An object store (GCS, S3) based registry used to persist feature definitions that are registered with the feature store. Systems can discover feature data by interacting with the registry through the Feast SDK. * **Feast Python SDK/CLI:** The primary user-facing SDK. Used to: * Manage version controlled feature definitions. - * Materialize \(load\) feature values into the online store. + * Materialize (load) feature values into the online store. * Build and retrieve training datasets from the offline store. * Retrieve online features. -* **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is populated by materialization jobs. +* **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is populated by materialization jobs and from [stream ingestion](../../reference/alpha-stream-ingestion.md). * **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. Feast does not manage the offline store directly, but runs queries against it. {% hint style="info" %} -Java and Go Clients are also available for online feature retrieval. See [API Reference](../feast-on-kubernetes/reference-1/api/). +Java and Go Clients are also available for online feature retrieval. {% endhint %} -
 diff --git a/docs/getting-started/architecture-and-components/provider.md b/docs/getting-started/architecture-and-components/provider.md new file mode 100644 index 0000000000..9eadf73ded --- /dev/null +++ b/docs/getting-started/architecture-and-components/provider.md @@ -0,0 +1,10 @@ +# Provider + +A provider is an implementation of a feature store using specific feature store components \(e.g. offline store, online store\) targeting a specific environment \(e.g. GCP stack\). + +Providers orchestrate various components \(offline store, online store, infrastructure, compute\) inside an environment. For example, the `gcp` provider supports [BigQuery](https://cloud.google.com/bigquery) as an offline store and [Datastore](https://cloud.google.com/datastore) as an online store, ensuring that these components can work together seamlessly. Feast has three built-in providers \(`local`, `gcp`, and `aws`\) with default configurations that make it easy for users to start a feature store in a specific environment. These default configurations can be overridden easily. For instance, you can use the `gcp` provider but use Redis as the online store instead of Datastore. + +If the built-in providers are not sufficient, you can create your own custom provider. Please see [this guide](../../how-to-guides/creating-a-custom-provider.md) for more details.
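Overriding the `gcp` provider's online store with Redis is normally done in `feature_store.yaml`; the equivalent can also be sketched in Python through `RepoConfig`. This is a hedged sketch — the import paths are assumptions based on the SDK layout of this era, and the project/registry values are placeholders:

```python
from feast import FeatureStore
from feast.infra.online_stores.redis import RedisOnlineStoreConfig
from feast.repo_config import RepoConfig

# Sketch: gcp provider, but Redis instead of the default Datastore.
config = RepoConfig(
    project="my_project",                   # placeholder
    registry="gs://my-bucket/registry.db",  # placeholder
    provider="gcp",
    online_store=RedisOnlineStoreConfig(connection_string="localhost:6379"),
)
store = FeatureStore(config=config)
```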
+ +Please see [feature\_store.yaml](../../reference/feature-repository/feature-store-yaml.md#overview) for configuring providers. +
 diff --git a/docs/getting-started/architecture-and-components/registry.md b/docs/getting-started/architecture-and-components/registry.md new file mode 100644 index 0000000000..6bbef98d17 --- /dev/null +++ b/docs/getting-started/architecture-and-components/registry.md @@ -0,0 +1,30 @@ +# Registry + +The Feast feature registry is a central catalog of all the feature definitions and their related metadata. It allows data scientists to search, discover, and collaborate on new features. + +Each Feast deployment has a single feature registry. Feast only supports file-based registries today, but supports three different backends: + +* `Local`: Used as a local backend for storing the registry during development +* `S3`: Used as a centralized backend for storing the registry on AWS +* `GCS`: Used as a centralized backend for storing the registry on GCP + +The feature registry is updated during different operations when using Feast. More specifically, objects within the registry \(entities, feature views, feature services\) are updated when running `apply` from the Feast CLI, but metadata about objects can also be updated during operations like materialization. + +Users interact with a feature registry through the Feast SDK. Listing all feature views: + +```python +from feast import FeatureStore + +fs = FeatureStore("my_feature_repo/") +print(fs.list_feature_views()) +``` + +Or retrieving a specific feature view: + +```python +from feast import FeatureStore + +fs = FeatureStore("my_feature_repo/") +fv = fs.get_feature_view("my_fv1") +``` + +{% hint style="info" %} +The feature registry is a [Protobuf representation](https://github.com/feast-dev/feast/blob/master/protos/feast/core/Registry.proto) of Feast metadata. This Protobuf file can be read programmatically from other programming languages, but no compatibility guarantees are made on the internal structure of the registry. +{% endhint %} +
 diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md new file mode 100644 index 0000000000..7ad0115a72 --- /dev/null +++ b/docs/getting-started/concepts/README.md @@ -0,0 +1,17 @@ +# Concepts + +{% page-ref page="overview.md" %} + +{% page-ref page="data-source.md" %} + +{% page-ref page="entity.md" %} + +{% page-ref page="feature-view.md" %} + +{% page-ref page="feature-service.md" %} + +{% page-ref page="feature-retrieval.md" %} + +{% page-ref page="point-in-time-joins.md" %} + +{% page-ref page="dataset.md" %}
 diff --git a/docs/getting-started/concepts/data-source.md b/docs/getting-started/concepts/data-source.md new file mode 100644 index 0000000000..d468108ca1 --- /dev/null +++ b/docs/getting-started/concepts/data-source.md @@ -0,0 +1,12 @@ +# Data source + +The data source refers to raw underlying data \(e.g. a table in BigQuery\). + +Feast uses a time-series data model to represent data. This data model is used to interpret feature data in data sources in order to build training datasets or when materializing features into an online store. + +Below is an example data source with a single entity \(`driver`\) and two features \(`trips_today` and `rating`\).
+ +![Ride-hailing data source](../../.gitbook/assets/image%20%2816%29.png) + +
 diff --git a/docs/getting-started/concepts/dataset.md b/docs/getting-started/concepts/dataset.md new file mode 100644 index 0000000000..59f7168905 --- /dev/null +++ b/docs/getting-started/concepts/dataset.md @@ -0,0 +1,50 @@ +# Dataset + +Feast datasets allow for conveniently saving dataframes that include both features and entities to be subsequently used for data analysis and model training. +[Data Quality Monitoring](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98) was the primary motivation for creating the dataset concept. + +A dataset's metadata is stored in the Feast registry, and the raw data (features, entities, additional input keys, and timestamps) is stored in the [offline store](../architecture-and-components/offline-store.md). + +A dataset can be created from: +1. Results of historical retrieval +2. [planned] Logging requests (including input for [on demand transformation](../../reference/alpha-on-demand-feature-view.md)) and responses during feature serving +3. [planned] Logging features during writing to the online store (from a batch source or stream) + +### Creating Saved Dataset from Historical Retrieval + +To create a saved dataset from historical features for later retrieval or analysis, a user needs to call the `get_historical_features` method first and then pass the returned retrieval job to the `create_saved_dataset` method. +`create_saved_dataset` will trigger the provided retrieval job (by calling `.persist()` on it) to store the data using the specified `storage`. +The storage type must be the same as the globally configured offline store (e.g., it is impossible to persist data to Redshift with a BigQuery source). +`create_saved_dataset` will also create a `SavedDataset` object with all related metadata and will write it to the registry. + +```python +from feast import FeatureStore +from feast.infra.offline_stores.bigquery_source import SavedDatasetBigQueryStorage + +store = FeatureStore() + +historical_job = store.get_historical_features( + features=["driver:avg_trip"], + entity_df=..., +) + +dataset = store.create_saved_dataset( + from_=historical_job, + name='my_training_dataset', + storage=SavedDatasetBigQueryStorage(table_ref='..my_training_dataset'), + tags={'author': 'oleksii'} +) + +dataset.to_df() +``` + +A saved dataset can later be retrieved using the `get_saved_dataset` method: +```python +dataset = store.get_saved_dataset('my_training_dataset') +dataset.to_df() +``` + +--- + +Check out our [tutorial on validating historical features](../../tutorials/validating-historical-features.md) to see how this concept can be applied in a real-world use case. \ No newline at end of file
 diff --git a/docs/getting-started/concepts/entity.md b/docs/getting-started/concepts/entity.md new file mode 100644 index 0000000000..77cfc0aff2 --- /dev/null +++ b/docs/getting-started/concepts/entity.md @@ -0,0 +1,22 @@ +# Entity + +An entity is a collection of semantically related features. Users define entities to map to the domain of their use case. For example, a ride-hailing service could have customers and drivers as their entities, which group related features that correspond to these customers and drivers. + +```python +from feast import Entity, ValueType + +driver = Entity(name='driver', value_type=ValueType.STRING, join_keys=['driver_id']) +``` + +Entities are typically defined as part of feature views.
The entity name is used to reference the entity from a feature view definition, and the join key is used to identify the physical primary key on which feature values are stored and retrieved. These keys are used during the lookup of feature values from the online store and the join process in point-in-time joins. It is possible to define composite entities \(more than one entity object\) in a feature view. It is also possible for feature views to have zero entities. See [feature view](feature-view.md) for more details. + +Entities should be reused across feature views. + +## **Entity key** + +A related concept is an entity key. These are one or more entity values that uniquely describe a feature view record. In the case of an entity \(like a `driver`\) that only has a single entity field, the entity _is_ an entity key. However, it is also possible for an entity key to consist of multiple entity values. For example, a feature view with the composite entity of \(customer, country\) might have an entity key of \(1001, 5\). + +![](../../.gitbook/assets/image%20%2815%29.png) + +Entity keys act as primary keys. They are used during the lookup of features from the online store, and they are also used to match feature rows across feature views during point-in-time joins. + +
 diff --git a/docs/concepts/data-model-and-concepts.md b/docs/getting-started/concepts/feature-retrieval.md similarity index 52% rename from docs/concepts/data-model-and-concepts.md rename to docs/getting-started/concepts/feature-retrieval.md index d1dcad56e5..bece0f5527 100644 --- a/docs/concepts/data-model-and-concepts.md +++ b/docs/getting-started/concepts/feature-retrieval.md @@ -1,4 +1,4 @@ -# Data model +# Feature retrieval ## Dataset @@ -10,57 +10,32 @@ A dataset is a collection of rows that is produced by a historical retrieval fro ## Feature References -Feature references uniquely identify feature values in Feast. The structure of a feature reference in string form is as follows: `<feature_table>:<feature>` +Feature references uniquely identify feature values in Feast. The structure of a feature reference in string form is as follows: `<feature_view>:<feature>` Feature references are used for the retrieval of features from Feast: ```python online_features = fs.get_online_features( - feature_refs=[ + features=[ 'driver_locations:lon', 'drivers_activity:trips_today' ], - entity_rows=[{'driver': 'driver_1001'}] + entity_rows=[ + # {join_key: entity_value} + {'driver': 'driver_1001'} + ] ) ``` It is possible to retrieve features from multiple feature views with a single request, and Feast is able to join features from multiple tables in order to build a training dataset. However, it is not possible to reference \(or retrieve\) features from multiple projects at the same time. -## **Entity key** - -Entity keys are one or more entity values that uniquely describe an entity. In the case of an entity \(like a `driver`\) that only has a single entity field, the entity _is_ an entity key. However, it is also possible for an entity key to consist of multiple entity values. For example, a feature view with the composite entity of \(customer, country\) might have an entity key of \(1001, 5\). - -![](../.gitbook/assets/image%20%2815%29.png) - -Entity keys act as primary keys. They are used during the lookup of features from the online store, and they are also used to match feature rows across feature views during point-in-time joins.
+{% hint style="info" %} +Note: if you're using [Feature views without entities](feature-view.md#feature-views-without-entities), those features can be added here without additional entity values in the `entity_rows`. +{% endhint %} ## Event timestamp -The timestamp on which an event occurred, as found in a feature view's data source. The entity timestamp describes the event time at which a feature was observed or generated. +The timestamp on which an event occurred, as found in a feature view's data source. The event timestamp describes the event time at which a feature was observed or generated. Event timestamps are used during point-in-time joins to ensure that the latest feature values are joined from feature views onto entity rows. Event timestamps are also used to ensure that old feature values aren't served to models during online serving. -## Entity row - -An entity key at a specific point in time. - -![](../.gitbook/assets/image%20%2811%29.png) - -## Entity dataframe - -A collection of entity rows. Entity dataframes are the "left table" that is enriched with feature values when building training datasets. The entity dataframe is provided to Feast by users during historical retrieval: - -```python -training_df = store.get_historical_features( - entity_df=entity_df, - feature_refs = [ - 'drivers_activity:trips_today', - 'drivers_activity:rating' - ], -) -``` - -Example of an entity dataframe with feature values joined to it: - -![](../.gitbook/assets/image%20%2817%29.png) -
 diff --git a/docs/getting-started/concepts/feature-service.md b/docs/getting-started/concepts/feature-service.md new file mode 100644 index 0000000000..adb4927113 --- /dev/null +++ b/docs/getting-started/concepts/feature-service.md @@ -0,0 +1,48 @@ +# Feature service + +A feature service is an object that represents a logical group of features from one or more [feature views](feature-view.md#feature-view). Feature services allow features from within a feature view to be used as needed by an ML model. Users can expect to create one feature service per model, allowing for tracking of the features used by models. + +{% tabs %} +{% tab title="driver_trips_feature_service.py" %} +```python +from feast import FeatureService + +from driver_ratings_feature_view import driver_ratings_fv +from driver_trips_feature_view import driver_stats_fv + +driver_stats_fs = FeatureService( + name="driver_activity", + features=[driver_stats_fv, driver_ratings_fv[["lifetime_rating"]]] +) +``` +{% endtab %} +{% endtabs %} + +Feature services are used during + +* The generation of training datasets when querying feature views in order to find historical feature values. A single training dataset may consist of features from multiple feature views. +* Retrieval of features from the online store. The features retrieved from the online store may also belong to multiple feature views. + +{% hint style="info" %} +Applying a feature service does not result in an actual service being deployed. +{% endhint %} + +Feature services can be retrieved from the feature store, and referenced when retrieving features from the online store. + +```python +from feast import FeatureStore +feature_store = FeatureStore('.')  # Initialize the feature store + +feature_service = feature_store.get_feature_service("driver_activity") +features = feature_store.get_online_features( + features=feature_service, entity_rows=[entity_dict] +) +``` + +Feature services can also be used when retrieving historical features from the offline store.
+ +```python +from feast import FeatureStore +feature_store = FeatureStore('.') # Initialize the feature store + +feature_service = feature_store.get_feature_service("driver_activity") +feature_store.get_historical_features(features=feature_service, entity_df=entity_df) +``` diff --git a/docs/getting-started/concepts/feature-view.md b/docs/getting-started/concepts/feature-view.md new file mode 100644 index 0000000000..e3decf39c9 --- /dev/null +++ b/docs/getting-started/concepts/feature-view.md @@ -0,0 +1,174 @@ +# Feature view + +## Feature views + +A feature view is an object that represents a logical group of time-series feature data as it is found in a [data source](data-source.md). Feature views consist of zero or more [entities](entity.md), one or more [features](feature-view.md#feature), and a [data source](data-source.md). Feature views allow Feast to model your existing feature data in a consistent way in both an offline (training) and online (serving) environment. Feature views generally contain features that are properties of a specific object, in which case that object is defined as an entity and included in the feature view. If the features are not related to a specific object, the feature view might not have entities; see [feature views without entities](feature-view.md#feature-views-without-entities) below. + +{% tabs %} +{% tab title="driver_trips_feature_view.py" %} +```python +from feast import BigQuerySource, FeatureView, Field +from feast.types import Float32, Int64 + +driver_stats_fv = FeatureView( + name="driver_activity", + entities=["driver"], + schema=[ + Field(name="trips_today", dtype=Int64), + Field(name="rating", dtype=Float32), + ], + source=BigQuerySource( + table="feast-oss.demo_data.driver_activity" + ) +) +``` +{% endtab %} +{% endtabs %} + +Feature views are used during + +* The generation of training datasets by querying the data source of feature views in order to find historical feature values. A single training dataset may consist of features from multiple feature views. +* Loading of feature values into an online store. Feature views determine the storage schema in the online store. Feature values can be loaded from batch sources or from [stream sources](../../reference/data-sources/push.md). +* Retrieval of features from the online store. Feature views provide the schema definition to Feast in order to look up features from the online store. + +{% hint style="info" %} +Feast does not generate feature values. It acts as the ingestion and serving system. The data sources described within feature views should reference feature values in their already computed form. +{% endhint %} + +## Feature views without entities + +If a feature view contains features that are not related to a specific entity, the feature view can be defined without entities (only event timestamps are needed for this feature view). 
+ +{% tabs %} +{% tab title="global_stats.py" %} +```python +from feast import BigQuerySource, FeatureView, Field +from feast.types import Int64 + +global_stats_fv = FeatureView( + name="global_stats", + entities=[], + schema=[ + Field(name="total_trips_today_by_all_drivers", dtype=Int64), + ], + source=BigQuerySource( + table="feast-oss.demo_data.global_stats" + ) +) +``` +{% endtab %} +{% endtabs %} + +## Feature inferencing + +If the `schema` parameter is not specified when the feature view is created, Feast will infer the features during `feast apply`. It does this by creating a feature for each column in the underlying data source, except the columns corresponding to the entities of the feature view and the columns corresponding to the timestamp columns of the feature view's data source. The names and value types of the inferred features will use the names and data types of the columns from which the features were inferred. + +## Entity aliasing + +"Entity aliases" can be specified to join `entity_dataframe` columns that do not match the column names in the source table of a FeatureView. + +This could be used if a user has no control over these column names, or if multiple entities are subclasses of a more general entity. For example, "spammer" and "reporter" could be aliases of a "user" entity, and "origin" and "destination" could be aliases of a "location" entity as shown below. + +It is suggested that you dynamically specify the new FeatureView name using `.with_name` and the `join_key_map` override using `.with_join_key_map` instead of needing to register each new copy. + +{% tabs %} +{% tab title="location_stats_feature_view.py" %} +```python +from feast import BigQuerySource, Entity, FeatureView, Field, ValueType +from feast.types import Int32 + +location = Entity(name="location", join_keys=["location_id"], value_type=ValueType.INT64) + +location_stats_fv = FeatureView( + name="location_stats", + entities=["location"], + schema=[ + Field(name="temperature", dtype=Int32) + ], + source=BigQuerySource( + table="feast-oss.demo_data.location_stats" + ), +) +``` +{% endtab %} + +{% tab title="temperatures_feature_service.py" %} +```python +from feast import FeatureService + +from location_stats_feature_view import location_stats_fv + +temperatures_fs = FeatureService( + name="temperatures", + features=[ + location_stats_fv + .with_name("origin_stats") + .with_join_key_map( + {"location_id": "origin_id"} + ), + location_stats_fv + .with_name("destination_stats") + .with_join_key_map( + {"location_id": "destination_id"} + ), + ], +) +``` +{% endtab %} +{% endtabs %} + +## Feature + +A feature is an individual measurable property. It is typically a property observed on a specific entity, but does not have to be associated with an entity. For example, a feature of a `customer` entity could be the number of transactions they have made in an average month, while a feature that is not observed on a specific entity could be the total number of posts made by all users in the last month. + +Features are defined as part of feature views. Since Feast does not transform data, a feature is essentially a schema that only contains a name and a type: + +```python +from feast import Field +from feast.types import Float32 + +trips_today = Field( + name="trips_today", + dtype=Float32 +) +``` + +Together with [data sources](data-source.md), they indicate to Feast where to find your feature values, e.g., in a specific parquet file or BigQuery table.
Feature definitions are also used when reading features from the feature store, using [feature references](feature-retrieval.md#feature-references). + +Feature names must be unique within a [feature view](feature-view.md#feature-view). + +## \[Alpha] On demand feature views + +On demand feature views allows users to use existing features and request time data (features only available at request time) to transform and create new features. Users define python transformation logic which is executed in both historical retrieval and online retrieval paths: + +```python +from feast import Field, RequestSource +from feast.types import Float64 + +# Define a request data source which encodes features / information only +# available at request time (e.g. part of the user initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=PrimitiveFeastType.INT64), + Field(name="val_to_add_2": dtype=PrimitiveFeastType.INT64), + ] +) + +# Use the input data and feature view features to create new features +@on_demand_feature_view( + sources={ + 'driver_hourly_stats': driver_hourly_stats_view, + 'vals_to_add': input_request + }, + schema=[ + Field(name='conv_rate_plus_val1', dtype=Float64), + Field(name='conv_rate_plus_val2', dtype=Float64) + ] +) +def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df['conv_rate_plus_val1'] = (features_df['conv_rate'] + features_df['val_to_add']) + df['conv_rate_plus_val2'] = (features_df['conv_rate'] + features_df['val_to_add_2']) + return df +``` diff --git a/docs/concepts/overview.md b/docs/getting-started/concepts/overview.md similarity index 50% rename from docs/concepts/overview.md rename to docs/getting-started/concepts/overview.md index a721be99c5..7134073792 100644 --- a/docs/concepts/overview.md +++ b/docs/getting-started/concepts/overview.md @@ -1,10 +1,10 @@ # Overview -The top-level namespace within Feast is a [project](data-model-and-concepts.md#project). Users define one or more [feature views](data-model-and-concepts.md#feature-view) within a project. Each feature view contains one or more [features](data-model-and-concepts.md#feature) that relate to a specific [entity](data-model-and-concepts.md#entity). A feature view must always have a [data source](data-model-and-concepts.md#data-source), which in turn is used during the generation of training [datasets](data-model-and-concepts.md#dataset) and when materializing feature values into the online store. +The top-level namespace within Feast is a [project](overview.md#project). Users define one or more [feature views](feature-view.md) within a project. Each feature view contains one or more [features](feature-view.md#feature). These features typically relate to one or more [entities](entity.md). A feature view must always have a [data source](data-source.md), which in turn is used during the generation of training [datasets](feature-retrieval.md#dataset) and when materializing feature values into the online store. -![](../.gitbook/assets/image%20%287%29.png) +![](../../.gitbook/assets/image%20%287%29.png) -### Project +## Project Projects provide complete isolation of feature stores at the infrastructure level. This is accomplished through resource namespacing, e.g., prefixing table names with the associated project. Each project should be considered a completely separate universe of entities and features. It is not possible to retrieve features from multiple projects in a single request. 
We recommend having a single feature store and a single project per environment \(`dev`, `staging`, `prod`\). @@ -12,5 +12,3 @@ Projects provide complete isolation of feature stores at the infrastructure leve Projects are currently being supported for backward compatibility reasons. Projects may change in the future as we simplify the Feast API. {% endhint %} -### - diff --git a/docs/getting-started/concepts/point-in-time-joins.md b/docs/getting-started/concepts/point-in-time-joins.md new file mode 100644 index 0000000000..d2961e2f74 --- /dev/null +++ b/docs/getting-started/concepts/point-in-time-joins.md @@ -0,0 +1,62 @@ +# Point-in-time joins + +Feature values in Feast are modeled as time-series records. Below is an example of a driver feature view with two feature columns \(`trips_today`, and `earnings_today`\): + +![](../../.gitbook/assets/image%20%2836%29.png) + +The above table can be registered with Feast through the following feature view: + +```python +from feast import FeatureView, Field, FileSource +from feast.types import Float32, Int64 +from datetime import timedelta + +driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=["driver"], + schema=[ + Field(name="trips_today", dtype=Int64), + Field(name="earnings_today", dtype=Float32), + ], + ttl=timedelta(hours=2), + source=FileSource( + path="driver_hourly_stats.parquet" + ) +) +``` + +Feast is able to join features from one or more feature views onto an entity dataframe in a point-in-time correct way. This means Feast is able to reproduce the state of features at a specific point in the past. + +Given the following entity dataframe, imagine a user would like to join the above `driver_hourly_stats` feature view onto it, while preserving the `trip_success` column: + +![Entity dataframe containing timestamps, driver ids, and the target variable](../../.gitbook/assets/image%20%2823%29.png) + +The timestamps within the entity dataframe above are the events at which we want to reproduce the state of the world \(i.e., what the feature values were at those specific points in time\). In order to do a point-in-time join, a user would load the entity dataframe and run historical retrieval: + +```python +# Read in entity dataframe +entity_df = pd.read_csv("entity_df.csv") + +training_df = store.get_historical_features( + entity_df=entity_df, + features = [ + 'driver_hourly_stats:trips_today', + 'driver_hourly_stats:earnings_today' + ], +) +``` + +For each row within the entity dataframe, Feast will query and join the selected features from the appropriate feature view data source. Feast will scan backward in time from the entity dataframe timestamp up to a maximum of the TTL time. + +![](../../.gitbook/assets/image%20%2831%29.png) + +{% hint style="info" %} +Please note that the TTL time is relative to each timestamp within the entity dataframe. TTL is not relative to the current point in time \(when you run the query\). +{% endhint %} + +Below is the resulting joined training dataframe. It contains both the original entity rows and joined feature values: + +![](../../.gitbook/assets/image%20%2829%29.png) + +Three feature rows were successfully joined to the entity dataframe rows. The first row in the entity dataframe was older than the earliest feature rows in the feature view and could not be joined. The last row in the entity dataframe was outside of the TTL window \(the event happened 11 hours after the feature row\) and also couldn't be joined. 
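+
+For intuition, the join semantics above can be sketched in plain pandas: `pd.merge_asof` with a `tolerance` behaves much like Feast's backward scan with a TTL cutoff. The snippet below is an illustrative sketch only (the data is made up, and Feast's actual implementation differs per offline store):
+
+```python
+import pandas as pd
+
+# Entity dataframe: the timestamps at which we want to know feature values
+entity_df = pd.DataFrame({
+    "driver_id": [1001, 1001, 1002],
+    "event_timestamp": pd.to_datetime(
+        ["2021-04-12 08:00", "2021-04-12 16:00", "2021-04-12 16:00"]
+    ),
+})
+
+# Feature rows, as they might appear in driver_hourly_stats.parquet
+feature_df = pd.DataFrame({
+    "driver_id": [1001, 1002],
+    "event_timestamp": pd.to_datetime(["2021-04-12 07:00", "2021-04-12 07:30"]),
+    "trips_today": [10, 5],
+})
+
+# merge_asof scans backward from each entity timestamp; feature rows older
+# than the tolerance (the TTL) are not joined and remain NaN
+joined = pd.merge_asof(
+    entity_df.sort_values("event_timestamp"),
+    feature_df.sort_values("event_timestamp"),
+    on="event_timestamp",
+    by="driver_id",
+    tolerance=pd.Timedelta(hours=2),
+    direction="backward",
+)
+print(joined)
+```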
+ diff --git a/docs/getting-started/connect-to-feast/README.md b/docs/getting-started/connect-to-feast/README.md deleted file mode 100644 index 4333359f90..0000000000 --- a/docs/getting-started/connect-to-feast/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# Connect to Feast - -### Feast Python SDK - -The Feast Python SDK is used as a library to interact with a Feast deployment. - -* Define, register, and manage entities and features -* Ingest data into Feast -* Build and retrieve training datasets -* Retrieve online features - -{% page-ref page="python-sdk.md" %} - -### Feast CLI - -The Feast CLI is a command line implementation of the Feast Python SDK. - -* Define, register, and manage entities and features from the terminal -* Ingest data into Feast -* Manage ingestion jobs - -{% page-ref page="feast-cli.md" %} - -### Online Serving Clients - -The following clients can be used to retrieve online feature values: - -* [Feast Python SDK](https://api.docs.feast.dev/python/) -* [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) -* [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk) - diff --git a/docs/getting-started/connect-to-feast/feast-cli.md b/docs/getting-started/connect-to-feast/feast-cli.md deleted file mode 100644 index d15414f360..0000000000 --- a/docs/getting-started/connect-to-feast/feast-cli.md +++ /dev/null @@ -1,37 +0,0 @@ -# Feast CLI - -Install the Feast CLI using pip: - -```bash -pip install feast -``` - -Configure the CLI to connect to your Feast Core deployment: - -```text -feast config set core_url your.feast.deployment -``` - -{% hint style="info" %} -By default, all configuration is stored in `~/.feast/config` -{% endhint %} - -The CLI is a wrapper around the [Feast Python SDK](python-sdk.md): - -```aspnet -$ feast - -Usage: feast [OPTIONS] COMMAND [ARGS]... - -Options: - --help Show this message and exit. - -Commands: - config View and edit Feast properties - entities Create and manage entities - feature-tables Create and manage feature tables - jobs Create and manage jobs - projects Create and manage projects - version Displays version and connectivity information -``` - diff --git a/docs/getting-started/connect-to-feast/python-sdk.md b/docs/getting-started/connect-to-feast/python-sdk.md deleted file mode 100644 index bf31bd3849..0000000000 --- a/docs/getting-started/connect-to-feast/python-sdk.md +++ /dev/null @@ -1,20 +0,0 @@ -# Python SDK - -Install the [Feast Python SDK](https://api.docs.feast.dev/python/) using pip: - -```bash -pip install feast -``` - -Connect to an existing Feast Core deployment: - -```python -from feast import Client - -# Connect to an existing Feast Core deployment -client = Client(core_url='feast.example.com:6565') - -# Ensure that your client is connected by printing out some feature tables -client.list_feature_tables() -``` - diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md new file mode 100644 index 0000000000..21bad82312 --- /dev/null +++ b/docs/getting-started/faq.md @@ -0,0 +1,113 @@ +# FAQ + +{% hint style="info" %} +**Don't see your question?** + +We encourage you to ask questions on [Slack](https://slack.feast.dev) or [GitHub](https://github.com/feast-dev/feast). Even better, once you get an answer, add the answer to this FAQ via a [pull request](../project/development-guide.md)! +{% endhint %} + +## Getting started + +### Do you have any examples of how Feast should be used? + +The [quickstart](quickstart.md) is the easiest way to learn about Feast. 
For more detailed tutorials, please check out the [tutorials](../tutorials/tutorials-overview.md) page.
+
+## Concepts
+
+### What is the difference between feature tables and feature views?
+
+Feature tables from Feast 0.9 have been renamed to feature views in Feast 0.10+. For more details, please see the discussion [here](https://github.com/feast-dev/feast/issues/1583).
+
+### Do feature views have to include entities?
+
+No, there are [feature views without entities](concepts/feature-view.md#feature-views-without-entities).
+
+### What is the difference between data sources and the offline store?
+
+The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-source.md) and [offline store](architecture-and-components/offline-store.md) for more details.
+
+### Is it possible to have offline and online stores from different providers?
+
+Yes, this is possible. For example, you can use BigQuery as an offline store and Redis as an online store.
+
+## Functionality
+
+### Does Feast provide security or access control?
+
+Feast currently does not support any access control other than the access control required for the Provider's environment (for example, GCP and AWS permissions).
+
+### Does Feast support streaming sources?
+
+Yes. In earlier versions of Feast, we used Feast Spark to manage ingestion from stream sources. In the current version of Feast, we support [push-based ingestion](../reference/data-sources/push.md).
+
+### Does Feast support composite keys?
+
+A feature view can be defined with multiple entities. Since each entity has a unique join\_key, using multiple entities will achieve the effect of a composite key.
+
+### How does Feast compare with Tecton?
+
+Please see a detailed comparison of Feast vs. Tecton [here](https://www.tecton.ai/feast/). For another comparison, please see [here](https://mlops.community/learn/feature-store/).
+
+### What are the performance/latency characteristics of Feast?
+
+Feast is designed to work at scale and support low-latency online serving. Benchmarks ([RFC](https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit)) will be released soon, and active work is underway to support very latency-sensitive use cases.
+
+### Does Feast support embeddings and list features?
+
+Yes. Specifically:
+
+* Simple lists / dense embeddings:
+  * BigQuery supports list types natively
+  * Redshift does not support list types, so you'll need to serialize these features into strings (e.g. JSON or protocol buffers)
+  * Feast's implementation of online stores serializes features into Feast protocol buffers and supports list types (see [reference](https://github.com/feast-dev/feast/blob/master/docs/specs/online\_store\_format.md#appendix-a-value-proto-format))
+* Sparse embeddings (e.g. one-hot encodings)
+  * One way to do this efficiently is to have a protobuf or string representation of [https://www.tensorflow.org/guide/sparse\_tensor](https://www.tensorflow.org/guide/sparse\_tensor)
+
+### Does Feast support X storage engine?
+
+The list of supported offline and online stores can be found [here](../reference/offline-stores/) and [here](../reference/online-stores/), respectively. The [roadmap](../roadmap.md) indicates the stores for which we are planning to add support. Finally, our Provider abstraction is built to be extensible, so you can plug in your own implementations of offline and online stores. Please see more details about custom providers [here](../how-to-guides/creating-a-custom-provider.md).
+
+### Does Feast support using different clouds for offline vs online stores?
+
+Yes. Using a GCP or AWS provider in `feature_store.yaml` primarily sets default offline / online stores and configures where the remote registry file can live (using the AWS provider also allows for deployment to AWS Lambda). You can override the offline and online stores to be in different clouds if you wish.
+
+### How can I add a custom online store?
+
+Please follow the instructions [here](../how-to-guides/adding-support-for-a-new-online-store.md).
+
+### Can the same storage engine be used for both the offline and online store?
+
+Yes. For example, the Postgres [connector](https://github.com/nossrannug/feast-postgres) can be used as both an offline and online store.
+
+### Does Feast support S3 as a data source?
+
+Yes. There are two ways to use S3 in Feast:
+
+* Using Redshift as a data source via Spectrum ([AWS tutorial](https://docs.aws.amazon.com/redshift/latest/dg/tutorial-nested-data-create-table.html)), and then continuing with the [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide. See a [presentation](https://youtu.be/pMFbRJ7AnBk?t=9463) we did on this at our apply() meetup.
+* Using the `s3_endpoint_override` in a `FileSource` data source. This option is more suitable for quick proofs of concept that won't necessarily scale for production use cases.
+
+### How can I use Spark with Feast?
+
+Feast does not support Spark natively. However, you can create a [custom provider](../how-to-guides/creating-a-custom-provider.md) that supports Spark, which can help with more scalable materialization and ingestion.
+
+### Is Feast planning on supporting X functionality?
+
+Please see the [roadmap](../roadmap.md).
+
+## Project
+
+### What is the difference between Feast 0.9 and Feast 0.10+?
+
+Feast 0.10+ is much lighter weight and more extensible than Feast 0.9. It is designed to be simple to install and use. Please see this [document](https://docs.google.com/document/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0) for more details.
+
+### How do I migrate from Feast 0.9 to Feast 0.10+?
+
+Please see this [document](https://docs.google.com/document/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0). If you have any questions or suggestions, feel free to leave a comment on the document!
+
+### How do I contribute to Feast?
+
+For more details on contributing to the Feast community, see [here](../community.md) and [here](../project/contributing.md).
+
+### What are the plans for Feast Core, Feast Serving, and Feast Spark?
+
+Feast Core and Feast Serving were both part of Feast Java. We plan to support Feast Serving. We will not support Feast Core; instead, we will support our object store based registry. We will not support Feast Spark. For more details on what we plan on supporting, please see the [roadmap](../roadmap.md).
\ No newline at end of file diff --git a/docs/getting-started/install-feast.md b/docs/getting-started/install-feast.md deleted file mode 100644 index 055bbce585..0000000000 --- a/docs/getting-started/install-feast.md +++ /dev/null @@ -1,14 +0,0 @@ -# Install Feast - -Install Feast using [pip](https://pip.pypa.io): - -```text -pip install feast -``` - -Install Feast with GCP dependencies \(required when using BigQuery or Firestore\): - -```text -pip install 'feast[gcp]' -``` - diff --git a/docs/getting-started/install-feast/README.md b/docs/getting-started/install-feast/README.md deleted file mode 100644 index 6c1dd80134..0000000000 --- a/docs/getting-started/install-feast/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Install Feast - -{% hint style="success" %} -_Would you prefer a lighter-weight, pip-install, no-Kubernetes deployment of Feast?_ The Feast maintainers are currently building a new deployment experience for Feast. If you have thoughts on Feast's deployment, [chat with the maintainers](https://calendly.com/d/gc29-y88c/feast-chat-w-willem-and-jay) to learn more and provide feedback. -{% endhint %} - -A production deployment of Feast is deployed using Kubernetes. - -## Kubernetes \(with Helm\) - -This guide installs Feast into an existing Kubernetes cluster using Helm. The installation is not specific to any cloud platform or environment, but requires Kubernetes and Helm. - -## Amazon EKS \(with Terraform\) - -This guide installs Feast into an AWS environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly. - -## Azure AKS \(with Helm\) - -This guide installs Feast into an Azure AKS environment with Helm. - -## Azure AKS \(with Terraform\) - -This guide installs Feast into an Azure environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly. - -## Google Cloud GKE \(with Terraform\) - -This guide installs Feast into a Google Cloud environment using Terraform. The Terraform script is opinionated and intended to allow you to start quickly. - -## IBM Cloud Kubernetes Service \(IKS\) and Red Hat OpenShift \(using Kustomize\) - -This guide installs Feast into an existing [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/kubernetes-service) or [Red Hat OpenShift on IBM Cloud](https://www.ibm.com/cloud/openshift) using Kustomize. - -{% page-ref page="ibm-cloud-iks-with-kustomize.md" %} diff --git a/docs/getting-started/install-feast/google-cloud-gke-with-terraform.md b/docs/getting-started/install-feast/google-cloud-gke-with-terraform.md deleted file mode 100644 index a3252cf0bb..0000000000 --- a/docs/getting-started/install-feast/google-cloud-gke-with-terraform.md +++ /dev/null @@ -1,52 +0,0 @@ -# Google Cloud GKE \(with Terraform\) - -### Overview - -This guide installs Feast on GKE using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/gcp). - -{% hint style="info" %} -The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your GCP account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production. 
-{% endhint %} - -This Terraform configuration creates the following resources: - -* GKE cluster -* Feast services running on GKE -* Google Memorystore \(Redis\) as online store -* Dataproc cluster -* Kafka running on GKE, exposed to the dataproc cluster via internal load balancer - -### 1. Requirements - -* Install [Terraform](https://www.terraform.io/) > = 0.12 \(tested with 0.13.3\) -* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.3.4\) -* GCP [authentication](https://cloud.google.com/docs/authentication) and sufficient [privilege](https://cloud.google.com/iam/docs/understanding-roles) to create the resources listed above. - -### 2. Configure Terraform - -Create a `.tfvars` file under`feast/infra/terraform/gcp`. Name the file. In our example, we use `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. Sample configurations are provided below: - -{% code title="my\_feast.tfvars" %} -```typescript -gcp_project_name = "kf-feast" -name_prefix = "feast-0-8" -region = "asia-east1" -gke_machine_type = "n1-standard-2" -network = "default" -subnetwork = "default" -dataproc_staging_bucket = "feast-dataproc" -``` -{% endcode %} - -### 3. Apply - -After completing the configuration, initialize Terraform and apply: - -```bash -$ cd feast/infra/terraform/gcp -$ terraform init -$ terraform apply -var-file=my_feast.tfvars -``` - - - diff --git a/docs/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md b/docs/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md deleted file mode 100644 index 817d4dbe14..0000000000 --- a/docs/getting-started/install-feast/ibm-cloud-iks-with-kustomize.md +++ /dev/null @@ -1,185 +0,0 @@ -# IBM Cloud Kubernetes Service and Red Hat OpenShift \(with Kustomize\) - -## Overview - -This guide installs Feast on an existing IBM Cloud Kubernetes cluster or Red Hat OpenShift on IBM Cloud , and ensures the following services are running: - -* Feast Core -* Feast Online Serving -* Postgres -* Redis -* Kafka \(Optional\) -* Feast Jupyter \(Optional\) -* Prometheus \(Optional\) - -## 1. Prerequisites - -1. [IBM Cloud Kubernetes Service](https://www.ibm.com/cloud/kubernetes-service) or [Red Hat OpenShift on IBM Cloud](https://www.ibm.com/cloud/openshift) -2. Install [Kubectl](https://cloud.ibm.com/docs/containers?topic=containers-cs_cli_install#kubectl) that matches the major.minor versions of your IKS or Install the [OpenShift CLI](https://cloud.ibm.com/docs/openshift?topic=openshift-openshift-cli#cli_oc) that matches your local operating system and OpenShift cluster version. -3. Install [Helm 3](https://helm.sh/) -4. Install [Kustomize](https://kubectl.docs.kubernetes.io/installation/kustomize/) - -## 2. Preparation -### IBM Cloud Block Storage Setup (IKS only) - -:warning: If you have Red Hat OpenShift Cluster on IBM Cloud skip to this [section](#Security-Context-Constraint-Setup). - -By default, IBM Cloud Kubernetes cluster uses [IBM Cloud File Storage](https://www.ibm.com/cloud/file-storage) based on NFS as the default storage class, and non-root users do not have write permission on the volume mount path for NFS-backed storage. Some common container images in Feast, such as Redis, Postgres, and Kafka specify a non-root user to access the mount path in the images. When containers are deployed using these images, the containers fail to start due to insufficient permissions of the non-root user creating folders on the mount path. 
- -[IBM Cloud Block Storage](https://www.ibm.com/cloud/block-storage) allows for the creation of raw storage volumes and provides faster performance without the permission restriction of NFS-backed storage - -Therefore, to deploy Feast we need to set up [IBM Cloud Block Storage](https://cloud.ibm.com/docs/containers?topic=containers-block_storage#install_block) as the default storage class so that you can have all the functionalities working and get the best experience from Feast. - -1. [Follow the instructions](https://helm.sh/docs/intro/install/) to install the Helm version 3 client on your local machine. -2. Add the IBM Cloud Helm chart repository to the cluster where you want to use the IBM Cloud Block Storage plug-in. - - ```text - helm repo add iks-charts https://icr.io/helm/iks-charts - helm repo update - ``` - -3. Install the IBM Cloud Block Storage plug-in. When you install the plug-in, pre-defined block storage classes are added to your cluster. - - ```text - helm install v2.0.2 iks-charts/ibmcloud-block-storage-plugin -n kube-system - ``` - - Example output: - - ```text - NAME: v2.0.2 - LAST DEPLOYED: Fri Feb 5 12:29:50 2021 - NAMESPACE: kube-system - STATUS: deployed - REVISION: 1 - NOTES: - Thank you for installing: ibmcloud-block-storage-plugin. Your release is named: v2.0.2 - ... - ``` - -4. Verify that all block storage plugin pods are in a "Running" state. - - ```text - kubectl get pods -n kube-system | grep ibmcloud-block-storage - ``` - -5. Verify that the storage classes for Block Storage were added to your cluster. - - ```text - kubectl get storageclasses | grep ibmc-block - ``` - -6. Set the Block Storage as the default storageclass. - - ```text - kubectl patch storageclass ibmc-block-gold -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}' - kubectl patch storageclass ibmc-file-gold -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"false"}}}' - - # Check the default storageclass is block storage - kubectl get storageclass | grep \(default\) - ``` - - Example output: - - ```text - ibmc-block-gold (default) ibm.io/ibmc-block 65s - ``` -### Security Context Constraint Setup - -By default, in OpenShift, all pods or containers will use the [Restricted SCC](https://docs.openshift.com/container-platform/4.6/authentication/managing-security-context-constraints.html) which limits the UIDs pods can run with, causing the Feast installation to fail. To overcome this, you can allow Feast pods to run with any UID by executing the following: - -```text -oc adm policy add-scc-to-user anyuid -z default,kf-feast-kafka -n feast -``` -## 3. Installation - -Install Feast using kustomize. The pods may take a few minutes to initialize. - -```bash -git clone https://github.com/kubeflow/manifests -cd manifests/contrib/feast/ -kustomize build feast/base | kubectl apply -n feast -f - -``` -### Optional: Enable Feast Jupyter and Kafka - -You may optionally enable the Feast Jupyter component which contains code examples to demonstrate Feast. Some examples require Kafka to stream real time features to the Feast online serving. To enable, edit the following properties in the `values.yaml` under the `manifests/contrib/feast` folder: -``` -kafka.enabled: true -feast-jupyter.enabled: true -``` - -Then regenerate the resource manifests and deploy: -``` -make feast/base -kustomize build feast/base | kubectl apply -n feast -f - -``` - -## 4. 
Use Feast Jupyter to connect to Feast - -After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster: - -```bash -kubectl port-forward \ -$(kubectl get pod -l app=feast-jupyter -o custom-columns=:metadata.name) 8888:8888 -n feast -``` - -```text -Forwarding from 127.0.0.1:8888 -> 8888 -Forwarding from [::1]:8888 -> 8888 -``` - -You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook. - -{% embed url="http://localhost:8888/tree?" caption="" %} - -## 5. Uninstall Feast -```text -kustomize build feast/base | kubectl delete -n feast -f - -``` -## 6. Troubleshooting - -When running the minimal\_ride\_hailing\_example Jupyter Notebook example the following errors may occur: - -1. When running `job = client.get_historical_features(...)`: - - ```text - KeyError: 'historical_feature_output_location' - ``` - - or - - ```text - KeyError: 'spark_staging_location' - ``` - - Add the following environment variable: - - ```text - os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION"] = "file:///home/jovyan/historical_feature_output" - os.environ["FEAST_SPARK_STAGING_LOCATION"] = "file:///home/jovyan/test_data" - ``` - -2. When running `job.get_status()` - - ```text - - ``` - - Add the following environment variable: - - ```text - os.environ["FEAST_REDIS_HOST"] = "feast-release-redis-master" - ``` - -3. When running `job = client.start_stream_to_online_ingestion(...)` - - ```text - org.apache.kafka.vendor.common.KafkaException: Failed to construct kafka consumer - ``` - - Add the following environment variable: - - ```text - os.environ["DEMO_KAFKA_BROKERS"] = "feast-release-kafka:9092" - ``` - diff --git a/docs/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md b/docs/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md deleted file mode 100644 index 99ff4a8e81..0000000000 --- a/docs/getting-started/install-feast/kubernetes-amazon-eks-with-terraform.md +++ /dev/null @@ -1,68 +0,0 @@ -# Amazon EKS \(with Terraform\) - -### Overview - -This guide installs Feast on AWS using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/aws). - -{% hint style="info" %} -The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your AWS account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production. -{% endhint %} - -This Terraform configuration creates the following resources: - -* Kubernetes cluster on Amazon EKS \(3x r3.large nodes\) -* Kafka managed by Amazon MSK \(2x kafka.t3.small nodes\) -* Postgres database for Feast metadata, using serverless Aurora \(min capacity: 2\) -* Redis cluster, using Amazon Elasticache \(1x cache.t2.micro\) -* Amazon EMR cluster to run Spark \(3x spot m4.xlarge\) -* Staging S3 bucket to store temporary data - -![](../../.gitbook/assets/feast-on-aws-3-%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%282%29%20%281%29.png) - -### 1. Requirements - -* Create an AWS account and [configure credentials locally](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) -* Install [Terraform](https://www.terraform.io/) > = 0.12 \(tested with 0.13.3\) -* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.3.4\) - -### 2. 
Configure Terraform - -Create a `.tfvars` file under`feast/infra/terraform/aws`. Name the file. In our example, we use `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. At a minimum, you need to set `name_prefix` and an AWS region: - -{% code title="my\_feast.tfvars" %} -```typescript -name_prefix = "my-feast" -region = "us-east-1" -``` -{% endcode %} - -### 3. Apply - -After completing the configuration, initialize Terraform and apply: - -```bash -$ cd feast/infra/terraform/aws -$ terraform init -$ terraform apply -var-file=my_feast.tfvars -``` - -Starting may take a minute. A kubectl configuration file is also created in this directory, and the file's name will start with `kubeconfig_` and end with a random suffix. - -### 4. Connect to Feast using Jupyter - -After all pods are running, connect to the Jupyter Notebook Server running in the cluster. - -To connect to the remote Feast server you just created, forward a port from the remote k8s cluster to your local machine. Replace `kubeconfig_XXXXXXX` below with the kubeconfig file name Terraform generates for you. - -```bash -KUBECONFIG=kubeconfig_XXXXXXX kubectl port-forward \ -$(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888 -``` - -```text -Forwarding from 127.0.0.1:8888 -> 8888 -Forwarding from [::1]:8888 -> 8888 -``` - -You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook. - diff --git a/docs/getting-started/install-feast/kubernetes-azure-aks-with-helm.md b/docs/getting-started/install-feast/kubernetes-azure-aks-with-helm.md deleted file mode 100644 index 66ba73ef23..0000000000 --- a/docs/getting-started/install-feast/kubernetes-azure-aks-with-helm.md +++ /dev/null @@ -1,139 +0,0 @@ -# Azure AKS \(with Helm\) - -## Overview - -This guide installs Feast on Azure Kubernetes cluster \(known as AKS\), and ensures the following services are running: - -* Feast Core -* Feast Online Serving -* Postgres -* Redis -* Spark -* Kafka -* Feast Jupyter \(Optional\) -* Prometheus \(Optional\) - -## 1. Requirements - -1. Install and configure [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli) -2. Install and configure [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) -3. Install [Helm 3](https://helm.sh/) - -## 2. Preparation - -Create an AKS cluster with Azure CLI. The detailed steps can be found [here](https://docs.microsoft.com/en-us/azure/aks/kubernetes-walkthrough), and a high-level walk through includes: - -```bash -az group create --name myResourceGroup --location eastus -az acr create --resource-group myResourceGroup --name feast-AKS-ACR --sku Basic -az aks create -g myResourceGroup -n feast-AKS --location eastus --attach-acr feast-AKS-ACR --generate-ssh-keys - -az aks install-cli -az aks get-credentials --resource-group myResourceGroup --name feast-AKS -``` - -Add the Feast Helm repository and download the latest charts: - -```bash -helm version # make sure you have the latest Helm installed -helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com -helm repo update -``` - -Feast includes a Helm chart that installs all necessary components to run Feast Core, Feast Online Serving, and an example Jupyter notebook. - -Feast Core requires Postgres to run, which requires a secret to be set on Kubernetes: - -```bash -kubectl create secret generic feast-postgresql --from-literal=postgresql-password=password -``` - -## 3. 
Feast installation - -Install Feast using Helm. The pods may take a few minutes to initialize. - -```bash -helm install feast-release feast-charts/feast -``` - -## 4. Spark operator installation - -Follow the documentation [to install Spark operator on Kubernetes ](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator), and Feast documentation to [configure Spark roles](../../reference/feast-and-spark.md) - -```bash -helm repo add spark-operator https://googlecloudplatform.github.io/spark-on-k8s-operator -helm install my-release spark-operator/spark-operator --set serviceAccounts.spark.name=spark --set image.tag=v1beta2-1.1.2-2.4.5 -``` - -and ensure the service account used by Feast has permissions to manage Spark Application resources. This depends on your k8s setup, but typically you'd need to configure a Role and a RoleBinding like the one below: - -```text -cat < -rules: -- apiGroups: ["sparkoperator.k8s.io"] - resources: ["sparkapplications"] - verbs: ["create", "delete", "deletecollection", "get", "list", "update", "watch", "patch"] ---- -apiVersion: rbac.authorization.k8s.io/v1beta1 -kind: RoleBinding -metadata: - name: use-spark-operator - namespace: -roleRef: - kind: Role - name: use-spark-operator - apiGroup: rbac.authorization.k8s.io -subjects: - - kind: ServiceAccount - name: default -EOF -``` - -## 5. Use Jupyter to connect to Feast - -After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster: - -```bash -kubectl port-forward \ -$(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888 -``` - -```text -Forwarding from 127.0.0.1:8888 -> 8888 -Forwarding from [::1]:8888 -> 8888 -``` - -You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook. - -{% embed url="http://localhost:8888/tree?" caption="" %} - -## 6. Environment variables - -If you are running the [Minimal Ride Hailing Example](https://github.com/feast-dev/feast/blob/master/examples/minimal/minimal_ride_hailing.ipynb), you may want to make sure the following environment variables are correctly set: - -```text -demo_data_location = "wasbs://@.blob.core.windows.net/" -os.environ["FEAST_AZURE_BLOB_ACCOUNT_NAME"] = "" -os.environ["FEAST_AZURE_BLOB_ACCOUNT_ACCESS_KEY"] = -os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION"] = "wasbs://@.blob.core.windows.net/out/" -os.environ["FEAST_SPARK_STAGING_LOCATION"] = "wasbs://@.blob.core.windows.net/artifacts/" -os.environ["FEAST_SPARK_LAUNCHER"] = "k8s" -os.environ["FEAST_SPARK_K8S_NAMESPACE"] = "default" -os.environ["FEAST_HISTORICAL_FEATURE_OUTPUT_FORMAT"] = "parquet" -os.environ["FEAST_REDIS_HOST"] = "feast-release-redis-master.default.svc.cluster.local" -os.environ["DEMO_KAFKA_BROKERS"] = "feast-release-kafka.default.svc.cluster.local:9092" -``` - -## 7. 
Further Reading - -* [Feast Concepts](../../concepts/overview.md) -* [Feast Examples/Tutorials](https://github.com/feast-dev/feast/tree/master/examples) -* [Feast Helm Chart Documentation](https://github.com/feast-dev/feast/blob/master/infra/charts/feast/README.md) -* [Configuring Feast components](../../reference/configuration-reference.md) -* [Feast and Spark](../../reference/feast-and-spark.md) - diff --git a/docs/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md b/docs/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md deleted file mode 100644 index 71dd15908d..0000000000 --- a/docs/getting-started/install-feast/kubernetes-azure-aks-with-terraform.md +++ /dev/null @@ -1,63 +0,0 @@ -# Azure AKS \(with Terraform\) - -## Overview - -This guide installs Feast on Azure using our [reference Terraform configuration](https://github.com/feast-dev/feast/tree/master/infra/terraform/azure). - -{% hint style="info" %} -The Terraform configuration used here is a greenfield installation that neither assumes anything about, nor integrates with, existing resources in your Azure account. The Terraform configuration presents an easy way to get started, but you may want to customize this set up before using Feast in production. -{% endhint %} - -This Terraform configuration creates the following resources: - -* Kubernetes cluster on Azure AKS -* Kafka managed by HDInsight -* Postgres database for Feast metadata, running as a pod on AKS -* Redis cluster, using Azure Cache for Redis -* [spark-on-k8s-operator](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator) to run Spark -* Staging Azure blob storage container to store temporary data - -## 1. Requirements - -* Create an Azure account and [configure credentials locally](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli) -* Install [Terraform](https://www.terraform.io/) \(tested with 0.13.5\) -* Install [Helm](https://helm.sh/docs/intro/install/) \(tested with v3.4.2\) - -## 2. Configure Terraform - -Create a `.tfvars` file under`feast/infra/terraform/azure`. Name the file. In our example, we use `my_feast.tfvars`. You can see the full list of configuration variables in `variables.tf`. At a minimum, you need to set `name_prefix` and `resource_group`: - -{% code title="my\_feast.tfvars" %} -```typescript -name_prefix = "feast" -resource_group = "Feast" # pre-existing resource group -``` -{% endcode %} - -## 3. Apply - -After completing the configuration, initialize Terraform and apply: - -```bash -$ cd feast/infra/terraform/azure -$ terraform init -$ terraform apply -var-file=my_feast.tfvars -``` - -## 4. Connect to Feast using Jupyter - -After all pods are running, connect to the Jupyter Notebook Server running in the cluster. - -To connect to the remote Feast server you just created, forward a port from the remote k8s cluster to your local machine. - -```bash -kubectl port-forward $(kubectl get pod -o custom-columns=:metadata.name | grep jupyter) 8888:8888 -``` - -```text -Forwarding from 127.0.0.1:8888 -> 8888 -Forwarding from [::1]:8888 -> 8888 -``` - -You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook. 
- diff --git a/docs/getting-started/install-feast/kubernetes-with-helm.md b/docs/getting-started/install-feast/kubernetes-with-helm.md deleted file mode 100644 index f31d666ba9..0000000000 --- a/docs/getting-started/install-feast/kubernetes-with-helm.md +++ /dev/null @@ -1,69 +0,0 @@ -# Kubernetes \(with Helm\) - -## Overview - -This guide installs Feast on an existing Kubernetes cluster, and ensures the following services are running: - -* Feast Core -* Feast Online Serving -* Postgres -* Redis -* Feast Jupyter \(Optional\) -* Prometheus \(Optional\) - -## 1. Requirements - -1. Install and configure [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) -2. Install [Helm 3](https://helm.sh/) - -## 2. Preparation - -Add the Feast Helm repository and download the latest charts: - -```text -helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com -helm repo update -``` - -Feast includes a Helm chart that installs all necessary components to run Feast Core, Feast Online Serving, and an example Jupyter notebook. - -Feast Core requires Postgres to run, which requires a secret to be set on Kubernetes: - -```bash -kubectl create secret generic feast-postgresql --from-literal=postgresql-password=password -``` - -## 3. Installation - -Install Feast using Helm. The pods may take a few minutes to initialize. - -```bash -helm install feast-release feast-charts/feast -``` - -## 4. Use Jupyter to connect to Feast - -After all the pods are in a `RUNNING` state, port-forward to the Jupyter Notebook Server in the cluster: - -```bash -kubectl port-forward \ -$(kubectl get pod -l app=feast-jupyter -o custom-columns=:metadata.name) 8888:8888 -``` - -```text -Forwarding from 127.0.0.1:8888 -> 8888 -Forwarding from [::1]:8888 -> 8888 -``` - -You can now connect to the bundled Jupyter Notebook Server at `localhost:8888` and follow the example Jupyter notebook. - -{% embed url="http://localhost:8888/tree?" caption="" %} - -## 5. Further Reading - -* [Feast Concepts](../../concepts/overview.md) -* [Feast Examples/Tutorials](https://github.com/feast-dev/feast/tree/master/examples) -* [Feast Helm Chart Documentation](https://github.com/feast-dev/feast/blob/master/infra/charts/feast/README.md) -* [Configuring Feast components](../../reference/configuration-reference.md) -* [Feast and Spark](../../reference/feast-and-spark.md) - diff --git a/docs/getting-started/learn-feast.md b/docs/getting-started/learn-feast.md deleted file mode 100644 index 10f2eb6d29..0000000000 --- a/docs/getting-started/learn-feast.md +++ /dev/null @@ -1,15 +0,0 @@ -# Learn Feast - -Explore the following resources to learn more about Feast: - -* [Concepts](../) describes all important Feast API concepts. -* [User guide](../user-guide/define-and-ingest-features.md) provides guidance on completing Feast workflows. -* [Examples](https://github.com/feast-dev/feast/tree/master/examples) contains Jupyter notebooks that you can run on your Feast deployment. -* [Advanced](../advanced/troubleshooting.md) contains information about both advanced and operational aspects of Feast. -* [Reference](../reference/api/) contains detailed API and design documents for advanced users. -* [Contributing](../contributing/contributing.md) contains resources for anyone who wants to contribute to Feast. - -{% hint style="info" %} -The best way to learn Feast is to use it. Jump over to our [Quickstart](../quickstart.md) guide to have one of our examples running in no time at all! 
-
-{% endhint %}
-
diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
new file mode 100644
index 0000000000..e9a294d5fc
--- /dev/null
+++ b/docs/getting-started/quickstart.md
@@ -0,0 +1,364 @@
+# Quickstart
+
+In this tutorial we will:
+
+1. Deploy a local feature store with a **Parquet file offline store** and **SQLite online store**.
+2. Build a training dataset using our time-series features from our **Parquet files**.
+3. Materialize feature values from the offline store into the online store.
+4. Read the latest features from the online store for inference.
+
+You can run this tutorial in Google Colab or on your local machine, following the guided steps below.
+
+![](../.gitbook/assets/colab\_logo\_32px.png)[**Run in Google Colab**](https://colab.research.google.com/github/feast-dev/feast/blob/master/examples/quickstart/quickstart.ipynb)
+
+## Overview
+
+In this tutorial, we use feature stores to generate training data and power online model inference for a ride-sharing driver satisfaction prediction model. Feast solves several common issues in this flow:
+
+1. **Training-serving skew and complex data joins:** Feature values often exist across multiple tables. Joining these datasets can be complicated, slow, and error-prone.
+   * Feast joins these tables with battle-tested logic that ensures _point-in-time_ correctness so future feature values do not leak to models.
+   * Feast alerts users to offline / online skew with data quality monitoring.
+2. **Online feature availability:** At inference time, models often need access to features that aren't readily available and need to be precomputed from other data sources.
+   * Feast manages deployment to a variety of online stores (e.g. DynamoDB, Redis, Google Cloud Datastore) and ensures necessary features are consistently _available_ and _freshly computed_ at inference time.
+3. **Feature reusability and model versioning:** Different teams within an organization are often unable to reuse features across projects, resulting in duplicate feature creation logic. Models have data dependencies that need to be versioned, for example when running A/B tests on model versions.
+   * Feast enables discovery of and collaboration on previously used features and enables versioning of sets of features (via _feature services_).
+   * Feast enables feature transformation so users can re-use transformation logic across online / offline use cases and across models.
+
+## Step 1: Install Feast
+
+Install the Feast SDK and CLI using pip:
+
+* In this tutorial, we focus on a local deployment. For a more in-depth guide on how to use Feast with Snowflake / GCP / AWS deployments, see [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/)
+
+{% tabs %}
+{% tab title="Bash" %}
+```bash
+pip install feast
+```
+{% endtab %}
+{% endtabs %}
+
+## Step 2: Create a feature repository
+
+Bootstrap a new feature repository using `feast init` from the command line.
+
+{% tabs %}
+{% tab title="Bash" %}
+```bash
+feast init feature_repo
+cd feature_repo
+```
+{% endtab %}
+{% endtabs %}
+
+{% tabs %}
+{% tab title="Output" %}
+```
+Creating a new Feast repository in /home/Jovyan/feature_repo.
+```
+{% endtab %}
+{% endtabs %}
+
+Let's take a look at the resulting demo repo itself. It breaks down into:
+
+* `data/` contains raw demo parquet data
+* `example.py` contains demo feature definitions
+* `feature_store.yaml` contains a demo setup configuring where data sources are located
+
+{% tabs %}
+{% tab title="feature_store.yaml" %}
+```yaml
+project: my_project
+registry: data/registry.db
+provider: local
+online_store:
+  path: data/online_store.db
+```
+{% endtab %}
+
+{% tab title="example.py" %}
+```python
+# This is an example feature definition file
+
+from datetime import timedelta
+
+from feast import Entity, FeatureView, Field, FileSource, ValueType
+from feast.types import Float32, Int64
+
+# Read data from parquet files. Parquet is convenient for local development mode. For
+# production, you can use your favorite DWH, such as BigQuery. See Feast documentation
+# for more info.
+driver_hourly_stats = FileSource(
+    path="/content/feature_repo/data/driver_stats.parquet",
+    timestamp_field="event_timestamp",
+    created_timestamp_column="created",
+)
+
+# Define an entity for the driver. You can think of an entity as a primary key used to
+# fetch features.
+# An entity has a name, used for later reference (e.g., in a feature view),
+# and a join key that identifies the physical field name used in storage.
+driver = Entity(name="driver", value_type=ValueType.INT64, join_keys=["driver_id"], description="driver id",)
+
+# Our parquet files contain sample data that includes a driver_id column, timestamps, and
+# three feature columns. Here we define a feature view that will allow us to serve this
+# data to our model online.
+driver_hourly_stats_view = FeatureView(
+    name="driver_hourly_stats",
+    entities=["driver"],  # reference entity by name
+    ttl=timedelta(seconds=86400 * 1),
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64),
+    ],
+    online=True,
+    source=driver_hourly_stats,
+    tags={},
+)
+```
+{% endtab %}
+{% endtabs %}
+
+![Demo parquet data: data/driver\_stats.parquet](../.gitbook/assets/screen-shot-2021-08-23-at-2.35.18-pm.png)
+
+The key line defining the overall architecture of the feature store is the **provider**. This defines where the raw data exists (for generating training data & feature values for serving), and where to materialize feature values in the online store (for serving).
+
+Valid values for `provider` in `feature_store.yaml` are:
+
+* local: use file source with SQLite/Redis
+* gcp: use BigQuery/Snowflake with Google Cloud Datastore/Redis
+* aws: use Redshift/Snowflake with DynamoDB/Redis
+
+Note that there are many other sources Feast works with, including Azure, Hive, Trino, and PostgreSQL via community plugins. See [Third party integrations](../getting-started/third-party-integrations.md) for all supported data sources.
+
+A custom setup can also be made by following [adding a custom provider](../how-to-guides/creating-a-custom-provider.md).
+
+
+## Step 3: Register feature definitions and deploy your feature store
+
+The `apply` command scans Python files in the current directory for feature view/entity definitions, registers the objects, and deploys infrastructure. In this example, it reads `example.py` (shown again below for convenience) and sets up SQLite online store tables. Note that we specified SQLite as the default online store by using the `local` provider in `feature_store.yaml`.
+
+{% tabs %}
+{% tab title="Bash" %}
+```bash
+feast apply
+```
+{% endtab %}
+
+{% tab title="example.py" %}
+```python
+# This is an example feature definition file
+
+from datetime import timedelta
+
+from feast import Entity, FeatureView, Field, FileSource, ValueType
+from feast.types import Float32, Int64
+
+# Read data from parquet files. Parquet is convenient for local development mode. For
+# production, you can use your favorite DWH, such as BigQuery. See Feast documentation
+# for more info.
+driver_hourly_stats = FileSource(
+    path="/content/feature_repo/data/driver_stats.parquet",
+    timestamp_field="event_timestamp",
+    created_timestamp_column="created",
+)
+
+# Define an entity for the driver. You can think of an entity as a primary key used to
+# fetch features.
+# An entity has a name, used for later reference (e.g., in a feature view),
+# and a join key that identifies the physical field name used in storage.
+driver = Entity(name="driver", value_type=ValueType.INT64, join_keys=["driver_id"], description="driver id",)
+
+# Our parquet files contain sample data that includes a driver_id column, timestamps, and
+# three feature columns. Here we define a feature view that will allow us to serve this
+# data to our model online.
+driver_hourly_stats_view = FeatureView(
+    name="driver_hourly_stats",
+    entities=["driver"],  # reference entity by name
+    ttl=timedelta(seconds=86400 * 1),
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64),
+    ],
+    online=True,
+    source=driver_hourly_stats,
+    tags={},
+)
+```
+{% endtab %}
+{% endtabs %}
+
+{% tabs %}
+{% tab title="Output" %}
+```
+Registered entity driver_id
+Registered feature view driver_hourly_stats
+Deploying infrastructure for driver_hourly_stats
+```
+{% endtab %}
+{% endtabs %}
+
+## Step 4: Generating training data
+
+To train a model, we need features and labels. Often, this label data is stored separately (e.g. you have one table storing user survey results and another set of tables with feature values).
+
+The user can query that table of labels with timestamps and pass that into Feast as an _entity dataframe_ for training data generation. In many cases, Feast will also intelligently join relevant tables to create the relevant feature vectors.
+
+* Note that we include timestamps because we want the features for the same driver at various timestamps to be used in a model.
+ +{% tabs %} +{% tab title="Python" %} +```python +from datetime import datetime, timedelta +import pandas as pd + +from feast import FeatureStore + +# The entity dataframe is the dataframe we want to enrich with feature values +entity_df = pd.DataFrame.from_dict( + { + # entity's join key -> entity values + "driver_id": [1001, 1002, 1003], + + # label name -> label values + "label_driver_reported_satisfaction": [1, 5, 3], + + # "event_timestamp" (reserved key) -> timestamps + "event_timestamp": [ + datetime.now() - timedelta(minutes=11), + datetime.now() - timedelta(minutes=36), + datetime.now() - timedelta(minutes=73), + ], + } +) + +store = FeatureStore(repo_path=".") + +training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], +).to_df() + +print("----- Feature schema -----\n") +print(training_df.info()) + +print() +print("----- Example features -----\n") +print(training_df.head()) +``` +{% endtab %} +{% endtabs %} + +{% tabs %} +{% tab title="Output" %} +```bash +----- Feature schema ----- + + +Int64Index: 3 entries, 0 to 2 +Data columns (total 6 columns): + # Column Non-Null Count Dtype +--- ------ -------------- ----- + 0 event_timestamp 3 non-null datetime64[ns, UTC] + 1 driver_id 3 non-null int64 + 2 label_driver_reported_satisfaction 3 non-null int64 + 3 conv_rate 3 non-null float32 + 4 acc_rate 3 non-null float32 + 5 avg_daily_trips 3 non-null int32 +dtypes: datetime64[ns, UTC](1), float32(2), int32(1), int64(2) +memory usage: 132.0 bytes +None + +----- Example features ----- + + event_timestamp driver_id ... acc_rate avg_daily_trips +0 2021-08-23 15:12:55.489091+00:00 1003 ... 0.120588 938 +1 2021-08-23 15:49:55.489089+00:00 1002 ... 0.504881 635 +2 2021-08-23 16:14:55.489075+00:00 1001 ... 0.138416 606 + +[3 rows x 6 columns] +``` +{% endtab %} +{% endtabs %} + +## Step 5: Load features into your online store + +We now serialize the latest values of features since the beginning of time to prepare for serving (note: `materialize-incremental` serializes all new features since the last `materialize` call). + +{% tabs %} +{% tab title="Bash" %} +```bash +CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S") +feast materialize-incremental $CURRENT_TIME +``` +{% endtab %} +{% endtabs %} + +{% tabs %} +{% tab title="Output" %} +```bash +Materializing 1 feature views to 2021-08-23 16:25:46+00:00 into the sqlite online +store. + +driver_hourly_stats from 2021-08-22 16:25:47+00:00 to 2021-08-23 16:25:46+00:00: +100%|████████████████████████████████████████████| 5/5 [00:00<00:00, 592.05it/s] +``` +{% endtab %} +{% endtabs %} + +## Step 6: Fetching feature vectors for inference + +At inference time, we need to quickly read the latest feature values for different drivers (which otherwise might have existed only in batch sources) from the online feature store using `get_online_features()`. These feature vectors can then be fed to the model. 
+ +{% tabs %} +{% tab title="Python" %} +```python +from pprint import pprint +from feast import FeatureStore + +store = FeatureStore(repo_path=".") + +feature_vector = store.get_online_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + entity_rows=[ + # {join_key: entity_value} + {"driver_id": 1004}, + {"driver_id": 1005}, + ], +).to_dict() + +pprint(feature_vector) +``` +{% endtab %} +{% endtabs %} + +{% tabs %} +{% tab title="Output" %} +```bash +{ + 'acc_rate': [0.5732735991477966, 0.7828438878059387], + 'avg_daily_trips': [33, 984], + 'conv_rate': [0.15498852729797363, 0.6263588070869446], + 'driver_id': [1004, 1005] +} +``` +{% endtab %} +{% endtabs %} + +## Next steps + +* Read the [Concepts](concepts/) page to understand the Feast data model. +* Read the [Architecture](architecture-and-components/) page. +* Check out our [Tutorials](../tutorials/tutorials-overview.md) section for more examples on how to use Feast. +* Follow our [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide for a more in-depth tutorial on using Feast. +* Join other Feast users and contributors in [Slack](https://slack.feast.dev) and become part of the community! diff --git a/docs/getting-started/third-party-integrations.md b/docs/getting-started/third-party-integrations.md new file mode 100644 index 0000000000..0c233d7b69 --- /dev/null +++ b/docs/getting-started/third-party-integrations.md @@ -0,0 +1,73 @@ +# Third party integrations + +We integrate with a wide set of tools and technologies so you can make Feast work in your existing stack. Many of these integrations are maintained as plugins to the main Feast repo. + +{% hint style="info" %} +Don't see your offline store or online store of choice here? Check out our guides to make a custom one! 
+ +* [Adding a new offline store](../how-to-guides/adding-a-new-offline-store.md) +* [Adding a new online store](../how-to-guides/adding-support-for-a-new-online-store.md) +{% endhint %} + +## Integrations + +### **Data Sources** + +* [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) +* [x] [Redshift source](https://docs.feast.dev/reference/data-sources/redshift) +* [x] [BigQuery source](https://docs.feast.dev/reference/data-sources/bigquery) +* [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) +* [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) +* [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) +* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) +* [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark) +* [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) +* [ ] HTTP source + +### Offline Stores + +* [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) +* [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift) +* [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) +* [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) +* [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) +* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) +* [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino) +* [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark) +* [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) +* [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) + +### Online Stores + +* [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb) +* [x] [Redis](https://docs.feast.dev/reference/online-stores/redis) +* [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) +* [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) +* [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) +* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) +* [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) +* [ ] Bigtable (in progress) +* [ ] Cassandra + +### **Deployments** + +* [x] AWS Lambda (Alpha release. See [guide](../reference/alpha-aws-lambda-feature-server.md) and [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) +* [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) +* [ ] Cloud Run +* [ ] KNative + + +## Standards + +In order for a plugin integration to be highlighted on this page, it must meet the following requirements: + +1. The plugin must have tests. Ideally it would use the Feast universal tests (see this [guide](../how-to-guides/adding-or-reusing-tests.md) for an example), but custom tests are fine. +2. The plugin must have some basic documentation on how it should be used. +3. The author must work with a maintainer to pass a basic code review (e.g. to ensure that the implementation roughly matches the core Feast implementations). 
In order for a plugin integration to be merged into the main Feast repo, it must meet the following requirements:

1. The PR must pass all integration tests. The universal tests (tests specifically designed for custom integrations) must be updated to test the integration.
2. There is documentation and a tutorial on how to use the integration.
3. The author (or someone else) agrees to take ownership of all the files, and maintain those files going forward.
4. If the plugin is being contributed by an organization, and not an individual, the organization should provide the infrastructure (or credits) for integration tests. diff --git a/docs/how-to-guides/adding-a-new-offline-store.md b/docs/how-to-guides/adding-a-new-offline-store.md new file mode 100644 index 0000000000..8eeac7bcf4 --- /dev/null +++ b/docs/how-to-guides/adding-a-new-offline-store.md @@ -0,0 +1,282 @@

# Adding a new offline store

## Overview

Feast makes adding support for a new offline store (database) easy. Developers can simply implement the [OfflineStore](../../sdk/python/feast/infra/offline\_stores/offline\_store.py#L41) interface to add support for a new store (other than the existing stores like Parquet files, Redshift, and BigQuery).

In this guide, we will show you how to extend the existing File offline store and use it in a feature repo. While we will be implementing a specific store, this guide should be representative of the process for adding support for any new offline store.

The full working code for this guide can be found at [feast-dev/feast-custom-offline-store-demo](https://github.com/feast-dev/feast-custom-offline-store-demo).

The process for using a custom offline store consists of the following steps:

1. Defining an `OfflineStore` class.
2. Defining an `OfflineStoreConfig` class.
3. Defining a `RetrievalJob` class for this offline store.
4. Defining a `DataSource` class for the offline store.
5. Referencing the `OfflineStore` in a feature repo's `feature_store.yaml` file.
6. Testing the `OfflineStore` class.

## 1. Defining an OfflineStore class

{% hint style="info" %}
 OfflineStore class names must end with the OfflineStore suffix!
{% endhint %}

The OfflineStore class contains several methods to read features from the offline store. Unlike the OnlineStore class, Feast does not manage any infrastructure for the offline store.

There are three methods that deal with reading data from the offline store: `get_historical_features`, `pull_latest_from_table_or_query`, and `pull_all_from_table_or_query`.

* `pull_latest_from_table_or_query` is invoked when running materialization (using the `feast materialize` or `feast materialize-incremental` commands, or the corresponding `FeatureStore.materialize()` method). This method pulls data from the offline store, and the `FeatureStore` class takes care of writing this data into the online store.
* `get_historical_features` is invoked when reading values from the offline store using the `FeatureStore.get_historical_features()` method. Typically, this method is used to retrieve features when training ML models.
* `pull_all_from_table_or_query` is a method that pulls all the data from an offline store from a specified start date to a specified end date.
{% code title="feast_custom_offline_store/file.py" %}
```python
    def get_historical_features(self,
                                config: RepoConfig,
                                feature_views: List[FeatureView],
                                feature_refs: List[str],
                                entity_df: Union[pd.DataFrame, str],
                                registry: Registry, project: str,
                                full_feature_names: bool = False) -> RetrievalJob:
        print("Getting historical features from my offline store")
        return super().get_historical_features(config,
                                               feature_views,
                                               feature_refs,
                                               entity_df,
                                               registry,
                                               project,
                                               full_feature_names)

    def pull_latest_from_table_or_query(self,
                                        config: RepoConfig,
                                        data_source: DataSource,
                                        join_key_columns: List[str],
                                        feature_name_columns: List[str],
                                        timestamp_field: str,
                                        created_timestamp_column: Optional[str],
                                        start_date: datetime,
                                        end_date: datetime) -> RetrievalJob:
        print("Pulling latest features from my offline store")
        return super().pull_latest_from_table_or_query(config,
                                                       data_source,
                                                       join_key_columns,
                                                       feature_name_columns,
                                                       timestamp_field=timestamp_field,
                                                       created_timestamp_column=created_timestamp_column,
                                                       start_date=start_date,
                                                       end_date=end_date)
```
{% endcode %}

## 2. Defining an OfflineStoreConfig class

Additional configuration may be needed to allow the OfflineStore to talk to the backing store. For example, Redshift needs configuration information like the connection information for the Redshift instance, credentials for connecting to the database, etc.

To facilitate configuration, all OfflineStore implementations are **required** to also define a corresponding OfflineStoreConfig class in the same file. This OfflineStoreConfig class should inherit from the `FeastConfigBaseModel` class, which is defined [here](../../sdk/python/feast/repo\_config.py#L44).

The `FeastConfigBaseModel` is a [pydantic](https://pydantic-docs.helpmanual.io) class, which parses yaml configuration into python objects. Pydantic also allows the model classes to define validators for the config classes, to make sure that the config classes are correctly defined.

This config class **must** contain a `type` field, which contains the fully qualified class name of its corresponding OfflineStore class.

Additionally, the name of the config class must be the same as the OfflineStore class, with the `Config` suffix.

An example of the config class for the custom file offline store:

{% code title="feast_custom_offline_store/file.py" %}
```python
class CustomFileOfflineStoreConfig(FeastConfigBaseModel):
    """ Custom offline store config for local (file-based) store """

    type: Literal["feast_custom_offline_store.file.CustomFileOfflineStore"] \
        = "feast_custom_offline_store.file.CustomFileOfflineStore"
```
{% endcode %}

This configuration can be specified in the `feature_store.yaml` as follows:

{% code title="feature_repo/feature_store.yaml" %}
```yaml
offline_store:
  type: feast_custom_offline_store.file.CustomFileOfflineStore
```
{% endcode %}

This configuration information is available to the methods of the OfflineStore, via the `config: RepoConfig` parameter, which is passed into the methods of the OfflineStore interface, specifically at the `config.offline_store` field of the `config` parameter.
{% code title="feast_custom_offline_store/file.py" %}
```python
    def get_historical_features(self,
                                config: RepoConfig,
                                feature_views: List[FeatureView],
                                feature_refs: List[str],
                                entity_df: Union[pd.DataFrame, str],
                                registry: Registry, project: str,
                                full_feature_names: bool = False) -> RetrievalJob:

        offline_store_config = config.offline_store
        assert isinstance(offline_store_config, CustomFileOfflineStoreConfig)
        store_type = offline_store_config.type
```
{% endcode %}

## 3. Defining a RetrievalJob class

The offline store methods aren't expected to perform their read operations eagerly. Instead, they are expected to execute lazily, and they do so by returning a `RetrievalJob` instance, which represents the execution of the actual query against the underlying store.

Custom offline stores may need to implement their own instances of the `RetrievalJob` interface.

The `RetrievalJob` interface exposes two methods - `to_df` and `to_arrow`. The expectation is for the retrieval job to be able to return the rows read from the offline store as a pandas DataFrame or as an Arrow table, respectively.

{% code title="feast_custom_offline_store/file.py" %}
```python
class CustomFileRetrievalJob(RetrievalJob):
    def __init__(self, evaluation_function: Callable):
        """Initialize a lazy historical retrieval job"""

        # The evaluation function executes a stored procedure to compute a historical retrieval.
        self.evaluation_function = evaluation_function

    def to_df(self):
        # Only execute the evaluation function to build the final historical retrieval dataframe at the last moment.
        print("Getting a pandas DataFrame from a File is easy!")
        df = self.evaluation_function()
        return df

    def to_arrow(self):
        # Only execute the evaluation function to build the final historical retrieval dataframe at the last moment.
        print("Getting an Arrow Table from a File is easy!")
        df = self.evaluation_function()
        return pyarrow.Table.from_pandas(df)
```
{% endcode %}

## 4. Defining a DataSource class for the offline store

Before this offline store can be used as the batch source for a feature view in a feature repo, a subclass of the `DataSource` [base class](https://rtd.feast.dev/en/master/index.html?highlight=DataSource#feast.data\_source.DataSource) needs to be defined. This class is responsible for holding information needed by specific feature views to support reading historical values from the offline store. For example, a feature view using Redshift as the offline store may need to know which table contains historical feature values.

The data source class should implement two methods - `from_proto` and `to_proto`.

For custom offline stores that are not being implemented in the main feature repo, the `custom_options` field should be used to store any configuration needed by the data source. In this case, the implementer is responsible for serializing this configuration into bytes in the `to_proto` method and reading the value back from bytes in the `from_proto` method.
{% code title="feast_custom_offline_store/file.py" %}
```python
class CustomFileDataSource(FileSource):
    """Custom data source class for local files"""
    def __init__(
        self,
        timestamp_field: Optional[str] = "",
        path: Optional[str] = None,
        field_mapping: Optional[Dict[str, str]] = None,
        created_timestamp_column: Optional[str] = "",
        date_partition_column: Optional[str] = "",
    ):
        super(CustomFileDataSource, self).__init__(
            timestamp_field=timestamp_field,
            created_timestamp_column=created_timestamp_column,
            field_mapping=field_mapping,
            date_partition_column=date_partition_column,
        )
        self._path = path


    @staticmethod
    def from_proto(data_source: DataSourceProto):
        custom_source_options = str(
            data_source.custom_options.configuration, encoding="utf8"
        )
        path = json.loads(custom_source_options)["path"]
        return CustomFileDataSource(
            field_mapping=dict(data_source.field_mapping),
            path=path,
            timestamp_field=data_source.timestamp_field,
            created_timestamp_column=data_source.created_timestamp_column,
            date_partition_column=data_source.date_partition_column,
        )

    def to_proto(self) -> DataSourceProto:
        config_json = json.dumps({"path": self.path})
        data_source_proto = DataSourceProto(
            type=DataSourceProto.CUSTOM_SOURCE,
            custom_options=DataSourceProto.CustomSourceOptions(
                configuration=bytes(config_json, encoding="utf8")
            ),
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
```
{% endcode %}

## 5. Using the custom offline store

After implementing these classes, the custom offline store can be used by referencing it in a feature repo's `feature_store.yaml` file, specifically in the `offline_store` field. The value specified should be the fully qualified class name of the OfflineStore.

As long as your OfflineStore class is available in your Python environment, it will be imported by Feast dynamically at runtime.

To use our custom file offline store, we can use the following `feature_store.yaml`:

{% code title="feature_repo/feature_store.yaml" %}
```yaml
project: test_custom
registry: data/registry.db
provider: local
offline_store:
  type: feast_custom_offline_store.file.CustomFileOfflineStore
```
{% endcode %}

If additional configuration for the offline store is **not** required, then we can omit the other fields and only specify the `type` of the offline store class as the value for the `offline_store`.

{% code title="feature_repo/feature_store.yaml" %}
```yaml
project: test_custom
registry: data/registry.db
provider: local
offline_store: feast_custom_offline_store.file.CustomFileOfflineStore
```
{% endcode %}

Finally, the custom data source class can be used in the feature repo to define a data source, and referred to in a feature view definition.

{% code title="feature_repo/repo.py" %}
```python
driver_hourly_stats = CustomFileDataSource(
    path="feature_repo/data/driver_stats.parquet",
    timestamp_field="event_timestamp",
    created_timestamp_column="created",
)


driver_hourly_stats_view = FeatureView(
    source=driver_hourly_stats,
    ...
)
```
{% endcode %}

## 6. Testing the OfflineStore class

Even if you have created the `OfflineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo.
In the Feast submodule, we can run all the unit tests with:

```
make test
```

The universal tests, which are integration tests specifically intended to test offline and online stores, can be run with:

```
make test-python-universal
```

The unit tests should succeed, but the universal tests will likely fail. The tests are parametrized based on the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py`. To overwrite these configurations, you can simply create your own module that defines a `FULL_REPO_CONFIGS` variable, and point Feast at it by setting the `FULL_REPO_CONFIGS_MODULE` environment variable. The main challenge there will be to write a `DataSourceCreator` for the offline store. In this repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_offline_store/feast_tests.py`, so you would run

```
export FULL_REPO_CONFIGS_MODULE='feast_custom_offline_store.feast_tests'
make test-python-universal
```

to test the offline store against the Feast universal tests. You should notice that some of the tests actually fail; this indicates that there is a mistake in the implementation of this offline store! diff --git a/docs/how-to-guides/adding-or-reusing-tests.md b/docs/how-to-guides/adding-or-reusing-tests.md new file mode 100644 index 0000000000..86c116442f --- /dev/null +++ b/docs/how-to-guides/adding-or-reusing-tests.md @@ -0,0 +1,211 @@

# Adding or reusing tests

## Overview

This guide will go over:

1. how Feast tests are set up
2. how to extend the test suite to test new functionality
3. how to use the existing test suite to test a new custom offline / online store

## Test suite overview

Let's inspect the test setup in `sdk/python/tests/integration`:

```bash
$ tree

.
├── e2e
│ └── test_universal_e2e.py
├── feature_repos
│ ├── repo_configuration.py
│ └── universal
│ ├── data_source_creator.py
│ ├── data_sources
│ │ ├── bigquery.py
│ │ ├── file.py
│ │ └── redshift.py
│ ├── entities.py
│ └── feature_views.py
├── offline_store
│ ├── test_s3_custom_endpoint.py
│ └── test_universal_historical_retrieval.py
├── online_store
│ ├── test_e2e_local.py
│ ├── test_feature_service_read.py
│ ├── test_online_retrieval.py
│ └── test_universal_online.py
├── registration
│ ├── test_cli.py
│ ├── test_cli_apply_duplicated_featureview_names.py
│ ├── test_cli_chdir.py
│ ├── test_feature_service_apply.py
│ ├── test_feature_store.py
│ ├── test_inference.py
│ ├── test_registry.py
│ ├── test_universal_odfv_feature_inference.py
│ └── test_universal_types.py
└── scaffolding
    ├── test_init.py
    ├── test_partial_apply.py
    ├── test_repo_config.py
    └── test_repo_operations.py

8 directories, 27 files
```

`feature_repos` has setup files for most tests in the test suite, as well as pytest fixtures for other tests. These fixtures parametrize on different offline stores, online stores, etc., and thus abstract away store-specific implementations, so tests don't need to reimplement store-specific setup (e.g. uploading dataframes to a particular store).
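To make that parametrization concrete, here is a minimal, self-contained sketch of the pattern. The `FULL_REPO_CONFIGS` list and `environment` fixture below are simplified, hypothetical stand-ins for the real definitions in `feature_repos/repo_configuration.py`, not their actual signatures.

```python
import pytest

# Hypothetical, simplified stand-in for the real FULL_REPO_CONFIGS, which
# holds IntegrationTestRepoConfig objects rather than plain dicts.
FULL_REPO_CONFIGS = [
    {"provider": "local", "offline_store": "file", "online_store": "sqlite"},
    {"provider": "gcp", "offline_store": "bigquery", "online_store": "datastore"},
]


@pytest.fixture(params=FULL_REPO_CONFIGS, ids=lambda c: c["offline_store"])
def environment(request):
    # Any test that takes `environment` as an argument runs once per store
    # combination; the real fixture also provisions a FeatureStore and test data.
    config = request.param
    yield config
    # Store-specific teardown (e.g. dropping test tables) would happen here.


def test_store_combinations(environment):
    # Runs once for file/sqlite and once for bigquery/datastore.
    assert environment["provider"] in {"local", "gcp"}
```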
+ +## Understanding an example test + +Let's look at a sample test using the universal repo: + +{% tabs %} +{% tab title="Python" %} +```python +@pytest.mark.integration +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features(environment, universal_data_sources, full_feature_names): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + customer_df, driver_df, orders_df, global_df, entity_df = ( + datasets["customer"], + datasets["driver"], + datasets["orders"], + datasets["global"], + datasets["entity"], + ) + # ... more test code + + customer_fv, driver_fv, driver_odfv, order_fv, global_fv = ( + feature_views["customer"], + feature_views["driver"], + feature_views["driver_odfv"], + feature_views["order"], + feature_views["global"], + ) + + feature_service = FeatureService( + "convrate_plus100", + features=[ + feature_views["driver"][["conv_rate"]], + feature_views["driver_odfv"] + ], + ) + + feast_objects = [] + feast_objects.extend( + [ + customer_fv, + driver_fv, + driver_odfv, + order_fv, + global_fv, + driver(), + customer(), + feature_service, + ] + ) + store.apply(feast_objects) + # ... more test code + + job_from_df = store.get_historical_features( + entity_df=entity_df_with_request_data, + features=[ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + ], + full_feature_names=full_feature_names, + ) + actual_df_from_df_entities = job_from_df.to_df() + # ... more test code + + assert_frame_equal( + expected_df, actual_df_from_df_entities, check_dtype=False, + ) + # ... more test code +``` +{% endtab %} +{% endtabs %} + +The key fixtures are the `environment` and `universal_data_sources` fixtures, which are defined in the `feature_repos` directories. This by default pulls in a standard dataset with driver and customer entities, certain feature views, and feature values. By including the environment as a parameter, the test automatically parametrizes across other offline / online store combinations. + +## Writing a new test or reusing existing tests + +### To add a new test to an existing test file + +* Use the same function signatures as an existing test (e.g. use `environment` as an argument) to include the relevant test fixtures. +* If possible, expand an individual test instead of writing a new test, due to the cost of standing up offline / online stores. + +### To test a new offline / online store from a plugin repo + +* Install Feast in editable mode with `pip install -e`. +* The core tests for offline / online store behavior are parametrized by the `FULL_REPO_CONFIGS` variable defined in `feature_repos/repo_configuration.py`. To overwrite this variable without modifying the Feast repo, create your own file that contains a `FULL_REPO_CONFIGS` (which will require adding a new `IntegrationTestRepoConfig` or two) and set the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. Then the core offline / online store tests can be run with `make test-python-universal`. 
* See the [custom offline store demo](https://github.com/feast-dev/feast-custom-offline-store-demo) and the [custom online store demo](https://github.com/feast-dev/feast-custom-online-store-demo) for examples.

### To include a new offline / online store in the main Feast repo

* Extend `data_source_creator.py` for your offline store.
* In `repo_configuration.py` add a new `IntegrationTestRepoConfig` or two (depending on how many online stores you want to test).
* Run the full test suite with `make test-python-integration`.

### Including a new offline / online store in the main Feast repo from external plugins with community maintainers

* This folder is for plugins that are officially maintained with community owners. Place the APIs in `feast/infra/offline_stores/contrib/`.
* Extend `data_source_creator.py` for your offline store and implement the required APIs.
* In `contrib_repo_configuration.py` add a new `IntegrationTestRepoConfig` (depending on how many online stores you want to test).
* Run the test suite on the contrib test suite with `make test-python-contrib-universal`.

### To include a new online store

* In `repo_configuration.py` add a new config that maps to a serialized version of the configuration you need in `feature_store.yaml` to set up the online store.
* In `repo_configuration.py`, add a new `IntegrationTestRepoConfig` for the offline stores you want to test against.
* Run the full test suite with `make test-python-integration`.

### To use custom data in a new test

* Check `test_universal_types.py` for an example of how to do this.

```python
@pytest.mark.integration
def test_your_feature(environment: Environment):
    df = ...  # construct your custom dataframe here
    data_source = environment.data_source_creator.create_data_source(
        df,
        destination_name=environment.feature_store.project
    )
    your_fv = driver_feature_view(data_source)
    entity = driver(value_type=ValueType.UNKNOWN)
    environment.feature_store.apply([your_fv, entity])

    # ... run test
```

### Running your own redis cluster for testing

* Install redis on your computer. If you are a Mac user, you should be able to `brew install redis`.
  * Running `redis-server --help` and `redis-cli --help` should show corresponding help menus.
* Run `cd scripts/create-cluster`, then run `./create-cluster start` followed by `./create-cluster create` to start the server. You should see output that looks like this:

```
Starting 6001
Starting 6002
Starting 6003
Starting 6004
Starting 6005
Starting 6006
```

* You should be able to run the integration tests and have the redis cluster tests pass.
* If you would like to run your own redis cluster, you can run the above commands with your own specified ports and connect to the newly configured cluster.
* To stop the cluster, run `./create-cluster stop` and then `./create-cluster clean`. diff --git a/docs/how-to-guides/adding-support-for-a-new-online-store.md b/docs/how-to-guides/adding-support-for-a-new-online-store.md new file mode 100644 index 0000000000..fee47945bf --- /dev/null +++ b/docs/how-to-guides/adding-support-for-a-new-online-store.md @@ -0,0 +1,285 @@

# Adding a new online store

## Overview

Feast makes adding support for a new online store (database) easy. Developers can simply implement the [OnlineStore](../../sdk/python/feast/infra/online\_stores/online\_store.py#L26) interface to add support for a new store (other than the existing stores like Redis, DynamoDB, SQLite, and Datastore).

In this guide, we will show you how to integrate with MySQL as an online store.
While we will be implementing a specific store, this guide should be representative of the process for adding support for any new online store.

The full working code for this guide can be found at [feast-dev/feast-custom-online-store-demo](https://github.com/feast-dev/feast-custom-online-store-demo).

The process of using a custom online store consists of the following steps:

1. Defining the `OnlineStore` class.
2. Defining the `OnlineStoreConfig` class.
3. Referencing the `OnlineStore` in a feature repo's `feature_store.yaml` file.
4. Testing the `OnlineStore` class.

## 1. Defining an OnlineStore class

{% hint style="info" %}
 OnlineStore class names must end with the OnlineStore suffix!
{% endhint %}

The OnlineStore class broadly contains two sets of methods:

* One set deals with managing the infrastructure that the online store needs for its operations.
* One set deals with writing data into the store, and reading data from the store.

### 1.1 Infrastructure Methods

There are two methods that deal with managing infrastructure for online stores: `update` and `teardown`.

* `update` is invoked when users run `feast apply` as a CLI command, or the `FeatureStore.apply()` SDK method.

The `update` method should be used to perform any operations necessary before data can be written to or read from the store. For example, the `update` method can be used to create MySQL tables in preparation for reads and writes to new feature views.

* `teardown` is invoked when users run `feast teardown` or `FeatureStore.teardown()`.

The `teardown` method should be used to perform any clean-up operations. For example, `teardown` can be used to drop MySQL indices and tables corresponding to the feature views being deleted.

{% code title="feast_custom_online_store/mysql.py" %}
```python
def update(
    self,
    config: RepoConfig,
    tables_to_delete: Sequence[Union[FeatureTable, FeatureView]],
    tables_to_keep: Sequence[Union[FeatureTable, FeatureView]],
    entities_to_delete: Sequence[Entity],
    entities_to_keep: Sequence[Entity],
    partial: bool,
):
    """
    An example of creating and managing the tables needed for a mysql-backed online store.
    """
    conn = self._get_conn(config)
    cur = conn.cursor(buffered=True)

    project = config.project

    for table in tables_to_keep:
        cur.execute(
            f"CREATE TABLE IF NOT EXISTS {_table_id(project, table)} (entity_key VARCHAR(512), feature_name VARCHAR(256), value BLOB, event_ts timestamp, created_ts timestamp, PRIMARY KEY(entity_key, feature_name))"
        )
        cur.execute(
            f"CREATE INDEX {_table_id(project, table)}_ek ON {_table_id(project, table)} (entity_key);"
        )

    for table in tables_to_delete:
        cur.execute(
            f"DROP INDEX {_table_id(project, table)}_ek ON {_table_id(project, table)};"
        )
        cur.execute(f"DROP TABLE IF EXISTS {_table_id(project, table)}")


def teardown(
    self,
    config: RepoConfig,
    tables: Sequence[Union[FeatureTable, FeatureView]],
    entities: Sequence[Entity],
):
    """
    Drop the MySQL indices and tables corresponding to the given feature views.
    """
    conn = self._get_conn(config)
    cur = conn.cursor(buffered=True)
    project = config.project

    for table in tables:
        cur.execute(
            f"DROP INDEX {_table_id(project, table)}_ek ON {_table_id(project, table)};"
        )
        cur.execute(f"DROP TABLE IF EXISTS {_table_id(project, table)}")
```
{% endcode %}

### 1.2 Read/Write Methods

There are two methods that deal with writing data to and reading data from the online store: `online_write_batch` and `online_read`.
* `online_write_batch` is invoked when running materialization (using the `feast materialize` or `feast materialize-incremental` commands, or the corresponding `FeatureStore.materialize()` method).
* `online_read` is invoked when reading values from the online store using the `FeatureStore.get_online_features()` method.

{% code title="feast_custom_online_store/mysql.py" %}
```python
def online_write_batch(
    self,
    config: RepoConfig,
    table: Union[FeatureTable, FeatureView],
    data: List[
        Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
    ],
    progress: Optional[Callable[[int], Any]],
) -> None:
    conn = self._get_conn(config)
    cur = conn.cursor(buffered=True)

    project = config.project

    for entity_key, values, timestamp, created_ts in data:
        entity_key_bin = serialize_entity_key(entity_key).hex()
        timestamp = _to_naive_utc(timestamp)
        if created_ts is not None:
            created_ts = _to_naive_utc(created_ts)

        for feature_name, val in values.items():
            self.write_to_table(created_ts, cur, entity_key_bin, feature_name, project, table, timestamp, val)
        conn.commit()
        if progress:
            progress(1)

def online_read(
    self,
    config: RepoConfig,
    table: Union[FeatureTable, FeatureView],
    entity_keys: List[EntityKeyProto],
    requested_features: Optional[List[str]] = None,
) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
    conn = self._get_conn(config)
    cur = conn.cursor(buffered=True)

    result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []

    project = config.project
    for entity_key in entity_keys:
        entity_key_bin = serialize_entity_key(entity_key).hex()

        cur.execute(
            f"SELECT feature_name, value, event_ts FROM {_table_id(project, table)} WHERE entity_key = %s",
            (entity_key_bin,),
        )

        res = {}
        res_ts = None
        for feature_name, val_bin, ts in cur.fetchall():
            val = ValueProto()
            val.ParseFromString(val_bin)
            res[feature_name] = val
            res_ts = ts

        if not res:
            result.append((None, None))
        else:
            result.append((res_ts, res))
    return result
```
{% endcode %}

## 2. Defining an OnlineStoreConfig class

Additional configuration may be needed to allow the OnlineStore to talk to the backing store. For example, MySQL may need configuration information like the host at which the MySQL instance is running, credentials for connecting to the database, etc.

To facilitate configuration, all OnlineStore implementations are **required** to also define a corresponding OnlineStoreConfig class in the same file. This OnlineStoreConfig class should inherit from the `FeastConfigBaseModel` class, which is defined [here](../../sdk/python/feast/repo\_config.py#L44).

The `FeastConfigBaseModel` is a [pydantic](https://pydantic-docs.helpmanual.io) class, which parses yaml configuration into python objects. Pydantic also allows the model classes to define validators for the config classes, to make sure that the config classes are correctly defined.

This config class **must** contain a `type` field, which contains the fully qualified class name of its corresponding OnlineStore class.

Additionally, the name of the config class must be the same as the OnlineStore class, with the `Config` suffix.
An example of the config class for MySQL:

{% code title="feast_custom_online_store/mysql.py" %}
```python
class MySQLOnlineStoreConfig(FeastConfigBaseModel):
    type: Literal["feast_custom_online_store.mysql.MySQLOnlineStore"] = "feast_custom_online_store.mysql.MySQLOnlineStore"

    host: Optional[StrictStr] = None
    user: Optional[StrictStr] = None
    password: Optional[StrictStr] = None
    database: Optional[StrictStr] = None
```
{% endcode %}

This configuration can be specified in the `feature_store.yaml` as follows:

{% code title="feature_repo/feature_store.yaml" %}
```yaml
online_store:
  type: feast_custom_online_store.mysql.MySQLOnlineStore
  user: foo
  password: bar
```
{% endcode %}

This configuration information is available to the methods of the OnlineStore, via the `config: RepoConfig` parameter which is passed into all the methods of the OnlineStore interface, specifically at the `config.online_store` field of the `config` parameter.

{% code title="feast_custom_online_store/mysql.py" %}
```python
def online_write_batch(
    self,
    config: RepoConfig,
    table: Union[FeatureTable, FeatureView],
    data: List[
        Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
    ],
    progress: Optional[Callable[[int], Any]],
) -> None:

    online_store_config = config.online_store
    assert isinstance(online_store_config, MySQLOnlineStoreConfig)

    connection = mysql.connector.connect(
        host=online_store_config.host or "127.0.0.1",
        user=online_store_config.user or "root",
        password=online_store_config.password,
        database=online_store_config.database or "feast",
        autocommit=True
    )
```
{% endcode %}

## 3. Using the custom online store

After implementing both these classes, the custom online store can be used by referencing it in a feature repo's `feature_store.yaml` file, specifically in the `online_store` field. The value specified should be the fully qualified class name of the OnlineStore.

As long as your OnlineStore class is available in your Python environment, it will be imported by Feast dynamically at runtime.

To use our MySQL online store, we can use the following `feature_store.yaml`:

{% code title="feature_repo/feature_store.yaml" %}
```yaml
project: test_custom
registry: data/registry.db
provider: local
online_store:
  type: feast_custom_online_store.mysql.MySQLOnlineStore
  user: foo
  password: bar
```
{% endcode %}

If additional configuration for the online store is **not** required, then we can omit the other fields and only specify the `type` of the online store class as the value for the `online_store`.

{% code title="feature_repo/feature_store.yaml" %}
```yaml
project: test_custom
registry: data/registry.db
provider: local
online_store: feast_custom_online_store.mysql.MySQLOnlineStore
```
{% endcode %}

## 4. Testing the OnlineStore class

Even if you have created the `OnlineStore` class in a separate repo, you can still test your implementation against the Feast test suite, as long as you have Feast as a submodule in your repo. In the Feast submodule, we can run all the unit tests with:

```
make test
```

The universal tests, which are integration tests specifically intended to test offline and online stores, can be run with:

```
make test-python-universal
```

The unit tests should succeed, but the universal tests will likely fail.
The tests are parametrized based on the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py`. To overwrite these configurations, you can simply create your own module that defines a `FULL_REPO_CONFIGS` variable, and point Feast at it by setting the `FULL_REPO_CONFIGS_MODULE` environment variable. In this repo, the file that overwrites `FULL_REPO_CONFIGS` is `feast_custom_online_store/feast_tests.py`, so you would run

```
export FULL_REPO_CONFIGS_MODULE='feast_custom_online_store.feast_tests'
make test-python-universal
```

to test the MySQL online store against the Feast universal tests. You should notice that some of the tests actually fail; this indicates that there is a mistake in the implementation of this online store! diff --git a/docs/how-to-guides/creating-a-custom-provider.md b/docs/how-to-guides/creating-a-custom-provider.md new file mode 100644 index 0000000000..40ec20ee6a --- /dev/null +++ b/docs/how-to-guides/creating-a-custom-provider.md @@ -0,0 +1,131 @@

# Adding a custom provider

### Overview

All Feast operations execute through a `provider`: operations like materializing data from the offline to the online store, updating infrastructure like databases, launching streaming ingestion jobs, building training datasets, and reading features from the online store.

Custom providers allow Feast users to extend Feast to execute any custom logic. Examples include:

* Launching custom streaming ingestion jobs \(Spark, Beam\)
* Launching custom batch ingestion \(materialization\) jobs \(Spark, Beam\)
* Adding custom validation to feature repositories during `feast apply`
* Adding custom infrastructure setup logic which runs during `feast apply`
* Extending Feast commands with in-house metrics, logging, or tracing

Feast comes with built-in providers, e.g., `LocalProvider`, `GcpProvider`, and `AwsProvider`. However, users can develop their own providers by creating a class that implements the contract in the [Provider class](https://github.com/feast-dev/feast/blob/745a1b43d20c0169b675b1f28039854205fb8180/sdk/python/feast/infra/provider.py#L22).

{% hint style="info" %}
This guide also comes with a fully functional [custom provider demo repository](https://github.com/feast-dev/feast-custom-provider-demo). Please have a look at the repository for a representative example of what a custom provider looks like, or fork the repository when creating your own provider.
{% endhint %}

### Guide

The fastest way to add custom logic to Feast is to extend an existing provider. The most generic provider is the `LocalProvider`, which contains no cloud-specific logic. The guide that follows will extend the `LocalProvider` with operations that print text to the console. It is up to you as a developer to add your custom code to the provider methods, but the guide below will provide the necessary scaffolding to get you started.

#### Step 1: Define a Provider class

The first step is to define a custom provider class. We've created the `MyCustomProvider` below.
```python
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union

from tqdm import tqdm

from feast.entity import Entity
from feast.feature_table import FeatureTable
from feast.feature_view import FeatureView
from feast.infra.local import LocalProvider
from feast.infra.offline_stores.offline_store import RetrievalJob
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.registry import Registry
from feast.repo_config import RepoConfig


class MyCustomProvider(LocalProvider):
    def __init__(self, config: RepoConfig, repo_path):
        super().__init__(config)
        # Add your custom init code here. This code runs on every Feast operation.

    def update_infra(
        self,
        project: str,
        tables_to_delete: Sequence[Union[FeatureTable, FeatureView]],
        tables_to_keep: Sequence[Union[FeatureTable, FeatureView]],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
        super().update_infra(
            project,
            tables_to_delete,
            tables_to_keep,
            entities_to_delete,
            entities_to_keep,
            partial,
        )
        print("Launching custom streaming jobs is pretty easy...")

    def materialize_single_feature_view(
        self,
        config: RepoConfig,
        feature_view: FeatureView,
        start_date: datetime,
        end_date: datetime,
        registry: Registry,
        project: str,
        tqdm_builder: Callable[[int], tqdm],
    ) -> None:
        super().materialize_single_feature_view(
            config, feature_view, start_date, end_date, registry, project, tqdm_builder
        )
        print("Launching custom batch jobs is pretty easy...")
```

Notice how in the above provider we have only overridden two of the methods on the `LocalProvider`, namely `update_infra` and `materialize_single_feature_view`. These two methods are convenient to override if you are planning to launch custom batch or streaming jobs. `update_infra` can be used for launching idempotent streaming jobs, and `materialize_single_feature_view` can be used for launching batch ingestion jobs.

It is possible to override all the methods on the provider class. In fact, it isn't even necessary to subclass an existing provider like `LocalProvider`. The only requirement for the provider class is that it follows the [Provider contract](https://github.com/feast-dev/feast/blob/048c837b2fa741b38b0e35b8f8e534761a232561/sdk/python/feast/infra/provider.py#L22).

#### Step 2: Configuring Feast to use the provider

Configure your [feature\_store.yaml](../reference/feature-repository/feature-store-yaml.md) file to point to your new provider class:

```yaml
project: repo
registry: registry.db
provider: feast_custom_provider.custom_provider.MyCustomProvider
online_store:
  type: sqlite
  path: online_store.db
offline_store:
  type: file
```

Notice how the `provider` field above points to the module and class where your provider can be found.

#### Step 3: Using the provider

Now you should be able to use your provider by running a Feast command:

```bash
feast apply
```

```text
Registered entity driver_id
Registered feature view driver_hourly_stats
Deploying infrastructure for driver_hourly_stats
Launching custom streaming jobs is pretty easy...
```

It may also be necessary to add the module root path to your `PYTHONPATH` as follows:

```bash
PYTHONPATH=$PYTHONPATH:/home/my_user/my_custom_provider feast apply
```

That's it. You should now have a fully functional custom provider!
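As a quick sanity check, you can also trigger the overridden materialization path from the Python SDK and watch for the custom log line. This is a minimal sketch that assumes the example repo above has already been applied with `feast apply`:

```python
from datetime import datetime, timedelta

from feast import FeatureStore

# Assumes feature_store.yaml points at MyCustomProvider, as configured above.
store = FeatureStore(repo_path=".")

# materialize() is routed through the provider, so this should also print
# "Launching custom batch jobs is pretty easy..." while feature views are
# being materialized.
store.materialize(
    start_date=datetime.utcnow() - timedelta(days=1),
    end_date=datetime.utcnow(),
)
```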
### Next steps

Have a look at the [custom provider demo repository](https://github.com/feast-dev/feast-custom-provider-demo) for a fully functional example of a custom provider. Feel free to fork it when creating your own custom provider!

diff --git a/docs/getting-started/README.md b/docs/how-to-guides/feast-snowflake-gcp-aws/README.md similarity index 88% rename from docs/getting-started/README.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/README.md index bb8ee164c0..753650080b 100644 --- a/docs/getting-started/README.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/README.md @@ -1,4 +1,4 @@
-# Getting started
+# Running Feast with Snowflake/GCP/AWS

{% page-ref page="install-feast.md" %}

@@ -12,5 +12,3 @@

{% page-ref page="read-features-from-the-online-store.md" %}

- - diff --git a/docs/getting-started/build-a-training-dataset.md b/docs/how-to-guides/feast-snowflake-gcp-aws/build-a-training-dataset.md similarity index 94% rename from docs/getting-started/build-a-training-dataset.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/build-a-training-dataset.md index 6e7eb8833f..97b3ad2cf5 100644 --- a/docs/getting-started/build-a-training-dataset.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/build-a-training-dataset.md @@ -2,15 +2,15 @@

Feast allows users to build a training dataset from time-series feature data that already exists in an offline store. Users are expected to provide a list of features to retrieve \(which may span multiple feature views\), and a dataframe to join the resulting features onto. Feast will then execute a point-in-time join of multiple feature views onto the provided dataframe, and return the full resulting dataframe.

-### Retrieving historical features
+## Retrieving historical features

-#### 1. Register your feature views
+### 1. Register your feature views

Please ensure that you have created a feature repository and that you have registered \(applied\) your feature views with Feast.

{% page-ref page="deploy-a-feature-store.md" %}

-#### 2. Define feature references
+### 2. Define feature references

Start by defining the feature references \(e.g., `driver_trips:average_daily_rides`\) for the features that you would like to retrieve from the offline store. These features can come from multiple feature tables. The only requirement is that the feature tables that make up the feature references have the same entity \(or composite entity\), and that they are located in the same offline store.

@@ -61,7 +61,7 @@ from feast import FeatureStore

fs = FeatureStore(repo_path="path/to/your/feature/repo")

training_df = fs.get_historical_features(
-    feature_refs=[
+    features=[
        "driver_hourly_stats:conv_rate",
        "driver_hourly_stats:acc_rate"
    ],
@@ -69,5 +69,5 @@ training_df = fs.get_historical_features( ).to_df() ```

-Once the feature references and an entity dataframe are defined, it is possible to call `get_historical_features()`. This method launches a job that executes a point-in-time join of features from the offline store onto the entity dataframe. Once completed, a job reference will be returned. This job reference can then be converted to a Pandas dataframe by calling `to_df()`.
+Once the feature references and an entity dataframe are defined, it is possible to call `get_historical_features()`. This method launches a job that executes a point-in-time join of features from the offline store onto the entity dataframe. Once completed, a job reference will be returned.
This job reference can then be converted to a Pandas dataframe by calling `to_df()`.

diff --git a/docs/getting-started/create-a-feature-repository.md b/docs/how-to-guides/feast-snowflake-gcp-aws/create-a-feature-repository.md similarity index 61% rename from docs/getting-started/create-a-feature-repository.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/create-a-feature-repository.md index 160a65b7d9..8754bc051a 100644 --- a/docs/getting-started/create-a-feature-repository.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/create-a-feature-repository.md @@ -1,6 +1,6 @@ # Create a feature repository

-A feature repository is a directory that contains the configuration of the feature store and individual features. This configuration is written as code \(Python/YAML\) and it's highly recommended that teams track it centrally using git. See [Feature Repository](../reference/feature-repository/) for a detailed explanation of feature repositories.
+A feature repository is a directory that contains the configuration of the feature store and individual features. This configuration is written as code \(Python/YAML\) and it's highly recommended that teams track it centrally using git. See [Feature Repository](../../reference/feature-repository/) for a detailed explanation of feature repositories.

The easiest way to create a new feature repository is to use the `feast init` command:

@@ -13,10 +13,40 @@ Creating a new Feast repository in /<...>/tiny_pika. ```

{% endtab %}

-{% tab title="GCP template" %}
+{% tabs %}
+{% tab title="Snowflake template" %}
+```bash
+feast init -t snowflake
+Snowflake Deployment URL: ...
+Snowflake User Name: ...
+Snowflake Password: ...
+Snowflake Role Name: ...
+Snowflake Warehouse Name: ...
+Snowflake Database Name: ...
+
+Creating a new Feast repository in /<...>/tiny_pika. ```
+{% endtab %}
+
+{% tab title="GCP template" %}
+```text
feast init -t gcp
+Creating a new Feast repository in /<...>/tiny_pika.
+```
+{% endtab %}
+
+{% tab title="AWS template" %}
+```text
+feast init -t aws
+AWS Region (e.g. us-west-2): ...
+Redshift Cluster ID: ...
+Redshift Database Name: ...
+Redshift User Name: ...
+Redshift S3 Staging Location (s3://*): ...
+Redshift IAM Role for S3 (arn:aws:iam::*:role/*): ...
+Should I upload example data to Redshift (overwriting 'feast_driver_hourly_stats' table)? (Y/n):
+
Creating a new Feast repository in /<...>/tiny_pika. ``` {% endtab %}

@@ -29,7 +59,7 @@ $ tree . └── tiny_pika ├── data
- │   └── driver_stats.parquet
+ │ └── driver_stats.parquet
├── example.py └── feature_store.yaml

@@ -48,6 +78,3 @@ You can now use this feature repository for development. You can try the followi
* Run `feast apply` to apply these definitions to Feast.
* Edit the example feature definitions in `example.py` and run `feast apply` again to change feature definitions.
* Initialize a git repository in the same directory and check the feature repository into version control.
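For reference, the `feature_store.yaml` generated by the default local template typically looks like the following; the project name is randomly generated, so `tiny_pika` here is just an example.

```yaml
# Sketch of a locally generated feature_store.yaml; your project name will differ.
project: tiny_pika
registry: data/registry.db
provider: local
online_store:
  path: data/online_store.db
```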
- - - diff --git a/docs/getting-started/deploy-a-feature-store.md b/docs/how-to-guides/feast-snowflake-gcp-aws/deploy-a-feature-store.md similarity index 85% rename from docs/getting-started/deploy-a-feature-store.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/deploy-a-feature-store.md index bc61fe4ee2..d31a721fcc 100644 --- a/docs/getting-started/deploy-a-feature-store.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/deploy-a-feature-store.md @@ -1,6 +1,6 @@ # Deploy a feature store -The Feast CLI can be used to deploy a feature store to your infrastructure, spinning up any necessary persistent resources like buckets or tables in data stores. The deployment target and effects depend on the `provider` that has been configured in your [feature\_store.yaml](../reference/feature-repository/feature-store-yaml.md) file, as well as the feature definitions found in your feature repository. +The Feast CLI can be used to deploy a feature store to your infrastructure, spinning up any necessary persistent resources like buckets or tables in data stores. The deployment target and effects depend on the `provider` that has been configured in your [feature\_store.yaml](../../reference/feature-repository/feature-store-yaml.md) file, as well as the feature definitions found in your feature repository. {% hint style="info" %} Here we'll be using the example repository we created in the previous guide, [Create a feature store](create-a-feature-repository.md). You can re-create it by running `feast init` in a new directory. @@ -25,7 +25,7 @@ At this point, no data has been materialized to your online store. Feast apply s ## Cleaning up -If you need to clean up the infrastructure created by `feast apply`, use the `teardown` command. +If you need to clean up the infrastructure created by `feast apply`, use the `teardown` command. {% hint style="danger" %} Warning: `teardown` is an irreversible command and will remove all feature store infrastructure. Proceed with caution! 
@@ -35,11 +35,5 @@ Warning: `teardown` is an irreversible command and will remove all feature store feast teardown ``` - - \*\*\*\* - - - - diff --git a/docs/how-to-guides/feast-snowflake-gcp-aws/install-feast.md b/docs/how-to-guides/feast-snowflake-gcp-aws/install-feast.md new file mode 100644 index 0000000000..26d95c6117 --- /dev/null +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/install-feast.md @@ -0,0 +1,31 @@ +# Install Feast + +Install Feast using [pip](https://pip.pypa.io): + +``` +pip install feast +``` + +Install Feast with Snowflake dependencies (required when using Snowflake): + +``` +pip install 'feast[snowflake]' +``` + +Install Feast with GCP dependencies (required when using BigQuery or Firestore): + +``` +pip install 'feast[gcp]' +``` + +Install Feast with AWS dependencies (required when using Redshift or DynamoDB): + +``` +pip install 'feast[aws]' +``` + +Install Feast with Redis dependencies (required when using Redis, either through AWS Elasticache or independently): + +``` +pip install 'feast[redis]' +``` diff --git a/docs/getting-started/load-data-into-the-online-store.md b/docs/how-to-guides/feast-snowflake-gcp-aws/load-data-into-the-online-store.md similarity index 76% rename from docs/getting-started/load-data-into-the-online-store.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/load-data-into-the-online-store.md index 71d7b564a4..e686a23ad1 100644 --- a/docs/getting-started/load-data-into-the-online-store.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/load-data-into-the-online-store.md @@ -4,15 +4,15 @@ Feast allows users to load their feature data into an online store in order to s ## Materializing features -#### 1. Register feature views +### 1. Register feature views Before proceeding, please ensure that you have applied \(registered\) the feature views that should be materialized. {% page-ref page="deploy-a-feature-store.md" %} -#### 2.a Materialize +### 2.a Materialize -The materialize command allows users to materialize features over a specific historical time range into the online store. +The materialize command allows users to materialize features over a specific historical time range into the online store. ```bash feast materialize 2021-04-07T00:00:00 2021-04-08T00:00:00 @@ -20,16 +20,16 @@ feast materialize 2021-04-07T00:00:00 2021-04-08T00:00:00 The above command will query the batch sources for all feature views over the provided time range, and load the latest feature values into the configured online store. -It is also possible to materialize for specific feature views by using the `-v / --views` argument. +It is also possible to materialize for specific feature views by using the `-v / --views` argument. ```text feast materialize 2021-04-07T00:00:00 2021-04-08T00:00:00 \ --views driver_hourly_stats ``` -The materialize command is completely stateless. It requires the user to provide the time ranges that will be loaded into the online store. This command is best used from a scheduler that tracks state, like Airflow. +The materialize command is completely stateless. It requires the user to provide the time ranges that will be loaded into the online store. This command is best used from a scheduler that tracks state, like Airflow. -#### 2.b Materialize Incremental \(Alternative\) +### 2.b Materialize Incremental \(Alternative\) For simplicity, Feast also provides a materialize command that will only ingest new data that has arrived in the offline store. 
Unlike `materialize`, `materialize-incremental` will track the state of previous ingestion runs inside of the feature registry.

@@ -41,7 +41,7 @@ feast materialize-incremental 2021-04-08T00:00:00

The `materialize-incremental` command functions similarly to `materialize` in that it loads data over a specific time range for all feature views \(or the selected feature views\) into the online store.

-Unlike `materialize`, `materialize-incremental` automatically determines the start time from which to load features from batch sources of each feature view. The first time `materialize-incremental` is executed it will set the start time to the oldest timestamp of each data source, and the end time as the one provided by the user. For each run of `materialize-incremental`, the end timestamp will be tracked. 
+Unlike `materialize`, `materialize-incremental` automatically determines the start time from which to load features from batch sources of each feature view. The first time `materialize-incremental` is executed, it will set the start time to the oldest timestamp of each data source, and the end time as the one provided by the user. For each run of `materialize-incremental`, the end timestamp will be tracked.

Subsequent runs of `materialize-incremental` will then set the start time to the end time of the previous run, thus only loading new data that has arrived into the online store. Note that the end time that is tracked for each run is at the feature view level, not globally for all feature views, i.e., different feature views may have different periods that have been materialized into the online store. diff --git a/docs/getting-started/read-features-from-the-online-store.md b/docs/how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md similarity index 85% rename from docs/getting-started/read-features-from-the-online-store.md rename to docs/how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md index 37c203e0ee..7b0a46239b 100644 --- a/docs/getting-started/read-features-from-the-online-store.md +++ b/docs/how-to-guides/feast-snowflake-gcp-aws/read-features-from-the-online-store.md @@ -6,34 +6,35 @@ The Feast Python SDK allows users to retrieve feature values from an online stor

Online stores only maintain the current state of features, i.e., latest feature values. No historical data is stored or served.
{% endhint %}

-### Retrieving online features
+## Retrieving online features

-#### 1. Ensure that feature values have been loaded into the online store
+### 1. Ensure that feature values have been loaded into the online store

Please ensure that you have materialized \(loaded\) your feature values into the online store before starting.

{% page-ref page="load-data-into-the-online-store.md" %}

-#### 2. Define feature references
+### 2. Define feature references

Create a list of features that you would like to retrieve. This list typically comes from the model training step and should accompany the model binary.

```python
-feature_refs = [
+features = [
    "driver_hourly_stats:conv_rate",
    "driver_hourly_stats:acc_rate"
]
```

-#### 3. Read online features
+### 3. Read online features

Next, we will create a feature store object and call `get_online_features()`, which reads the relevant feature values directly from the online store.
```python
fs = FeatureStore(repo_path="path/to/feature/repo")
online_features = fs.get_online_features(
-    feature_refs=feature_refs,
+    features=features,
    entity_rows=[
+        # {join_key: entity_value, ...}
        {"driver_id": 1001},
        {"driver_id": 1002}]
).to_dict()
diff --git a/docs/how-to-guides/fetching-java-features-k8s.md b/docs/how-to-guides/fetching-java-features-k8s.md new file mode 100644 index 0000000000..1aa6abd52b --- /dev/null +++ b/docs/how-to-guides/fetching-java-features-k8s.md @@ -0,0 +1,15 @@ +# How to set up a Java feature server + +This tutorial guides you on how to: + +* Define features and data sources in Feast using the Feast CLI +* Materialize features to a Redis cluster deployed on Kubernetes +* Deploy a Feast Java feature server into a Kubernetes cluster using the Feast helm charts +* Retrieve features using the gRPC API exposed by the Feast Java server + +Try it and let us know what you think! + +| ![](../.gitbook/assets/github-mark-32px.png)[ View guide in Github](../../examples/java-demo/README.md) | +|:--------------------------------------------------------------------------------------------------------| + +
diff --git a/docs/how-to-guides/production-kube.png b/docs/how-to-guides/production-kube.png new file mode 100644 index 0000000000..4bf4fb7728 Binary files /dev/null and b/docs/how-to-guides/production-kube.png differ diff --git a/docs/how-to-guides/production-lambda.png b/docs/how-to-guides/production-lambda.png new file mode 100644 index 0000000000..afb51fc082 Binary files /dev/null and b/docs/how-to-guides/production-lambda.png differ diff --git a/docs/how-to-guides/production-simple.png b/docs/how-to-guides/production-simple.png new file mode 100644 index 0000000000..688a1c009e Binary files /dev/null and b/docs/how-to-guides/production-simple.png differ diff --git a/docs/how-to-guides/production-spark.png b/docs/how-to-guides/production-spark.png new file mode 100644 index 0000000000..07a21cf991 Binary files /dev/null and b/docs/how-to-guides/production-spark.png differ diff --git a/docs/how-to-guides/running-feast-in-production.md b/docs/how-to-guides/running-feast-in-production.md new file mode 100644 index 0000000000..5380832609 --- /dev/null +++ b/docs/how-to-guides/running-feast-in-production.md @@ -0,0 +1,346 @@ +# Running Feast in production + +## Overview + +After learning about Feast concepts and playing with Feast locally, you're now ready to use Feast in production. +This guide aims to help with the transition from a sandbox project to a production-grade deployment in the cloud or on-premise. + +An overview of a typical production configuration is given below: + +![Overview](production-simple.png) + +{% hint style="success" %} +**Important note:** We're trying to keep Feast modular. With the exception of the core, most of the Feast blocks are loosely connected and can be used independently. Hence, you are free to build your own production configuration. +For example, you might not have a stream source and, thus, no need to write features in real-time to an online store. +Or you might not need to retrieve online features. + +Furthermore, there's no single "true" approach. As you will see in this guide, Feast usually provides several options for each problem. +It's totally up to you to pick a path that's best suited to your needs. +{% endhint %} + +In this guide we will show you how to: + +1. Deploy your feature store and keep your infrastructure in sync with your feature repository +2. Keep the data in your online store up to date +3. 
Use Feast for model training and serving +4. Ingest features from a stream source +5. Monitor your production deployment + +## 1. Automatically deploying changes to your feature definitions + +The first step to setting up a deployment of Feast is to create a Git repository that contains your feature definitions. The recommended way to version and track your feature definitions is by committing them to a repository and tracking changes through commits. + +Most teams will need to have a feature store deployed to more than one environment. We have created an example repository \([Feast Repository Example](https://github.com/feast-dev/feast-ci-repo-example)\) which contains two Feast projects, one per environment. + +The contents of this repository are shown below:
```bash
├── .github
│   └── workflows
│       ├── production.yml
│       └── staging.yml
│
├── staging
│   ├── driver_repo.py
│   └── feature_store.yaml
│
└── production
    ├── driver_repo.py
    └── feature_store.yaml
```
The repository contains three sub-folders: +* `staging/`: This folder contains the staging `feature_store.yaml` and Feast objects. Users that want to make changes to the Feast deployment in the staging environment will commit changes to this directory. +* `production/`: This folder contains the production `feature_store.yaml` and Feast objects. Typically users would first test changes in staging before copying the feature definitions into the production folder and committing the changes. +* `.github`: This folder is an example of a CI system that applies the changes in either the `staging` or `production` repositories using `feast apply`. This operation saves your feature definitions to a shared registry \(for example, on GCS\) and configures your infrastructure for serving features. + +The `feature_store.yaml` contains the following:
```text
project: staging
registry: gs://feast-ci-demo-registry/staging/registry.db
provider: gcp
```
Notice how the registry has been configured to use a Google Cloud Storage bucket. All changes made to infrastructure using `feast apply` are tracked in the `registry.db`. This registry will be accessed later by the Feast SDK in your training pipelines or model serving services in order to read features. + +{% hint style="success" %} +It is important to note that the CI system above must have access to create, modify, or remove infrastructure in your production environment. This is unlike clients of the feature store, who will only have read access. +{% endhint %} + +If your organization consists of many independent data science teams or a single group is working on several projects +that could benefit from sharing features, entities, sources, and transformations, then we encourage you to utilize Python packages inside each environment:
```
└── production
    ├── common
    │   ├── __init__.py
    │   ├── sources.py
    │   └── entities.py
    ├── ranking
    │   ├── __init__.py
    │   ├── views.py
    │   └── transformations.py
    ├── segmentation
    │   ├── __init__.py
    │   ├── views.py
    │   └── transformations.py
    └── feature_store.yaml
```
In summary, once you have set up a Git based repository with CI that runs `feast apply` on changes, your infrastructure \(offline store, online store, and cloud environment\) will automatically be updated to support the loading of data into the feature store or retrieval of data. + +## 2. 
How to load data into your online store and keep it up to date + +To keep your online store up to date, you need to run a job that loads feature data from your feature view sources into your online store. In Feast, this loading operation is called materialization. + +### 2.1. Manual materializations +The simplest way to schedule materialization is to run an **incremental** materialization using the Feast CLI:
```text
feast materialize-incremental 2022-01-01T00:00:00
```
The above command will load all feature values from all feature view sources into the online store up to the time `2022-01-01T00:00:00`. + +A timestamp is required to set the end date for materialization. If your source is fully up to date then the end date would be the current time. However, if you are querying a source where data is not yet available, then you do not want to set the timestamp to the current time. You would want to use a timestamp that ends at a date for which data is available. The next time `materialize-incremental` is run, Feast will load data that starts from the previous end date, so it is important to ensure that the materialization interval does not overlap with time periods for which data has not been made available. This is commonly the case when your source is an ETL pipeline that is scheduled on a daily basis. + +An alternative approach to incremental materialization \(where Feast tracks the intervals of data that need to be ingested\) is to call Feast directly from your scheduler like Airflow. In this case, Airflow is the system that tracks the intervals that have been ingested.
```text
feast materialize -v driver_hourly_stats 2020-01-01T00:00:00 2020-01-02T00:00:00
```
In the above example we are materializing the source data from the `driver_hourly_stats` feature view over a day. This command can be scheduled as the final operation in your Airflow ETL, which runs after you have computed your features and stored them in the source location. Feast will then load your feature data into your online store. + +The timestamps above should match the interval of data that has been computed by the data transformation system. + +### 2.2. Automate periodic materializations + +It is up to you which orchestration/scheduler to use to periodically run `$ feast materialize`. +Feast keeps the history of materialization in its registry so that the choice could be as simple as a [Unix cron utility](https://en.wikipedia.org/wiki/Cron). +A cron utility should be sufficient when you have just a few materialization jobs (it's usually one materialization job per feature view) triggered infrequently. +However, the amount of work can quickly outgrow the resources of a single machine. That happens because the materialization job needs to repackage all rows before writing them to an online store. That leads to high utilization of CPU and memory. +In this case, you might want to use a job orchestrator to run multiple jobs in parallel using several workers. +Kubernetes Jobs or Airflow are good choices for more comprehensive job orchestration.
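+
+If you would rather keep this logic in Python than shell out to the CLI, the same operations are also available on the SDK's `FeatureStore` object. The following is only a sketch of what a scheduled worker could run; it assumes the worker has the feature repository (and therefore `feature_store.yaml`) available locally and read/write access to the registry:
+
+```python
+from datetime import datetime, timedelta
+
+from feast import FeatureStore
+
+# Point at the feature repository that contains feature_store.yaml
+store = FeatureStore(repo_path=".")
+
+# Equivalent of `feast materialize-incremental`: Feast reads the end
+# timestamp of the previous run from the registry and only loads new data.
+store.materialize_incremental(end_date=datetime.utcnow())
+
+# Equivalent of `feast materialize`, with the scheduler owning the window:
+store.materialize(
+    start_date=datetime.utcnow() - timedelta(days=1),
+    end_date=datetime.utcnow(),
+)
+```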
+ +If you are using Airflow as a scheduler, Feast can be invoked through the [BashOperator](https://airflow.apache.org/docs/apache-airflow/stable/howto/operator/bash.html) after the [Python SDK](https://pypi.org/project/feast/) has been installed into a virtual environment and your feature repo has been synced:
```python
import datetime

from airflow.operators.bash import BashOperator

materialize = BashOperator(
    task_id='materialize',
    bash_command=f'feast materialize-incremental {datetime.datetime.now().replace(microsecond=0).isoformat()}',
)
```
{% hint style="success" %} +Important note: the Airflow worker must have read and write permissions to the registry file on GCS / S3 +since it pulls configuration and updates materialization history. +{% endhint %} + +## 3. How to use Feast for model training + +After we've defined our features and data sources in the repository, we can generate training datasets. + +The first thing we need to do in our training code is to create a `FeatureStore` object with a path to the registry. + +One way to ensure your production clients have access to the feature store is to provide a copy of the `feature_store.yaml` to those pipelines. This `feature_store.yaml` file will have a reference to the feature store registry, which allows clients to retrieve features from offline or online stores.
```python
from feast import FeatureStore

fs = FeatureStore(repo_path="production/")
```
Then, training data can be retrieved as follows:
```python
feature_refs = [
    'driver_hourly_stats:conv_rate',
    'driver_hourly_stats:acc_rate',
    'driver_hourly_stats:avg_daily_trips'
]

training_df = fs.get_historical_features(
    entity_df=entity_df,
    features=feature_refs,
).to_df()

model = ml.fit(training_df)
```
The most common way to productionize ML models is by storing and versioning models in a "model store", and then deploying these models into production. When using Feast, it is recommended that the list of feature references also be saved alongside the model. This ensures that models and the features they are trained on are paired together when being shipped into production:
```python
import json

# Save model
model.save('my_model.bin')

# Save features
with open('feature_refs.json', 'w') as f:
    json.dump(feature_refs, f)
```
To test your model locally, you can simply create a `FeatureStore` object, fetch online features, and then make a prediction:
```python
# Load model
model = ml.load('my_model.bin')

# Load feature references
with open('feature_refs.json', 'r') as f:
    feature_refs = json.load(f)

# Create feature store object
fs = FeatureStore(repo_path="production/")

# Read online features
feature_vector = fs.get_online_features(
    features=feature_refs,
    entity_rows=[{"driver_id": 1001}]
).to_dict()

# Make a prediction
prediction = model.predict(feature_vector)
```
{% hint style="success" %} +It is important to note that both the training pipeline and model serving service need only read access to the feature registry and associated infrastructure. This prevents clients from accidentally making changes to the feature store. +{% endhint %} + +## 4. Retrieving online features for prediction + +Once you have successfully loaded (or in Feast terminology materialized) your data from batch sources into the online store, you can start consuming features for model inference. +There are three approaches for this purpose, sorted from the simplest (in an operational sense) to the most performant (benchmarks to be published soon): + +### 4.1. 
Use the Python SDK within an existing Python service + +This approach is the most convenient way to keep your infrastructure as minimal as possible and avoid deploying extra services. +The Feast Python SDK will connect directly to the online store (Redis, Datastore, etc.), pull the feature data, and run transformations locally (if required). +The obvious drawback is that your service must be written in Python to use the Feast Python SDK. +A benefit of using a Python stack is that you can enjoy production-grade services with integrations with many existing data science tools. + +To integrate online retrieval into your service, use the following code:
```python
import json

from feast import FeatureStore

with open('feature_refs.json', 'r') as f:
    feature_refs = json.load(f)

fs = FeatureStore(repo_path="production/")

# Read online features
feature_vector = fs.get_online_features(
    features=feature_refs,
    entity_rows=[{"driver_id": 1001}]
).to_dict()
```
### 4.2. Consume features via HTTP API from Serverless Feature Server + +If you don't want to add the Feast Python SDK as a dependency, or your feature retrieval service is written in a non-Python language, +Feast can deploy a simple feature server +on serverless infrastructure (e.g., AWS Lambda, Google Cloud Run) for you. +This service will provide an HTTP API with JSON I/O, which can be easily used with any programming language. + +[Read more about this feature](../reference/alpha-aws-lambda-feature-server.md)
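+
+As a rough sketch of the client side, any language with an HTTP client works. In Python it could look like the following, where the endpoint URL is a placeholder (substitute the one reported by `feast endpoint` for your own deployment) and the request body shape follows the feature server reference linked above:
+
+```python
+import requests
+
+response = requests.post(
+    # Placeholder URL -- use the endpoint of your deployed feature server
+    "https://<feature-server-endpoint>/get-online-features",
+    json={
+        "features": [
+            "driver_hourly_stats:conv_rate",
+            "driver_hourly_stats:acc_rate",
+        ],
+        "entities": {"driver_id": [1001, 1002]},
+        "full_feature_names": True,
+    },
+)
+response.raise_for_status()
+feature_vector = response.json()
+```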
+### 4.3. Java based Feature Server deployed on Kubernetes + +For users with very latency-sensitive and high QPS use-cases, Feast offers a high-performance Java feature server. +Besides the benefits of running on JVM, this implementation also provides a gRPC API, which guarantees good connection utilization and +small request / response body size (compared to JSON). +You will need the Feast Java SDK to retrieve features from this service. This SDK wraps all the gRPC logic for you and provides more convenient APIs. + +The Java based feature server can be deployed to a Kubernetes cluster via Helm charts in a few simple steps: + +1. Install [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) and [helm 3](https://helm.sh/) +2. Add the Feast Helm repository and download the latest charts:
```
helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
helm repo update
```
3. Run Helm Install
```
helm install feast-release feast-charts/feast \
  --set global.registry.path=s3://feast/registries/prod \
  --set global.project=
```
This chart will deploy two services: `feature-server` and `transformation-service`. +Both must have read access to the registry file on cloud storage. Both will keep a copy of the registry in their memory and periodically refresh it, so expect some delays in update propagation in exchange for better performance. + +#### Load balancing + +The next step would be to install an L7 Load Balancer (e.g., [Envoy](https://www.envoyproxy.io/)) in front of the Java feature server. +For seamless integration with Kubernetes (including services created by Feast Helm chart) we recommend using [Istio](https://istio.io/) as Envoy's orchestrator. + +## 5. Ingesting features from a stream source + +Recently Feast added functionality for [stream ingestion](../reference/data-sources/push.md). +Please note that this is still in an early phase and new incompatible changes may be introduced. + +### 5.1. Using Python SDK in your Apache Spark / Beam pipeline + +The default option to write features from a stream is to add the Python SDK into your existing PySpark / Beam pipeline. +The Feast SDK provides a writer implementation that can be called from the `foreachBatch` stream writer in PySpark like this:
```python
from feast import FeatureStore

store = FeatureStore(...)

def feast_writer(spark_df):
    pandas_df = spark_df.toPandas()
    store.push("driver_hourly_stats", pandas_df)

streamingDF.writeStream.foreachBatch(feast_writer).start()
```
### 5.2. Push service *(still under development)* + +Alternatively, if you want to ingest features directly from a broker (e.g., Kafka or Kinesis), you can use the "push service", which will write to an online store. +This service will expose an HTTP API; when deployed on serverless platforms like AWS Lambda or Google Cloud Run, +it can be connected directly to Kinesis or PubSub. + +If you are using Kafka, [HTTP Sink](https://docs.confluent.io/kafka-connect-http/current/overview.html) could be utilized as a middleware. +In this case, the "push service" can be deployed on Kubernetes or as a Serverless function. + +## 6. Monitoring + +Feast services can report their metrics to a StatsD-compatible collector. To activate this function, you'll need to provide a StatsD IP address and a port when deploying the helm chart (in future, this will be added to `feature_store.yaml`). + +We use an [InfluxDB-style extension](https://github.com/prometheus/statsd_exporter#tagging-extensions) for StatsD format to be able to send tags along with metrics. Keep that in mind while selecting the collector ([Telegraf](https://www.influxdata.com/blog/getting-started-with-sending-statsd-metrics-to-telegraf-influxdb/#introducing-influx-statsd) will work for sure). + +We chose StatsD since it's a de-facto standard with various implementations (e.g., [1](https://github.com/prometheus/statsd_exporter), [2](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/statsd/README.md)) +and metrics can be easily exported to Prometheus, InfluxDB, AWS CloudWatch, etc. + +--- +## Summary + +To summarize, we want to show several architecture options that will be most frequently used in production: + +### Option #1 (currently preferred) + +* Feast SDK is being triggered by CI (e.g., GitHub Actions). It applies the latest changes from the feature repo to the Feast registry +* Airflow manages materialization jobs to ingest data from DWH to the online store periodically +* For the stream ingestion Feast Python SDK is used in the existing Spark / Beam pipeline +* Online features are served via either a Python feature server or a high performance Java feature server + * Both the Java feature server and the transformation server are deployed on a Kubernetes cluster (via Helm charts) +* Feast Python SDK is called locally to generate a training dataset + +![From Repository to Production: Feast Production Architecture](production-spark.png) + + +### Option #2 *(still in development)* + +Same as Option #1, except: +* Push service is deployed as AWS Lambda / Google Cloud Run and is configured as a sink for Kinesis or PubSub to ingest features directly from a stream broker.
+Lambda / Cloud Run is being managed by Feast SDK (from CI environment) +* Materialization jobs are managed inside Kubernetes via Kubernetes Job (currently not managed by Helm) + +![With Push Service as Lambda](production-lambda.png) + + +### Option #3 *(still in development)* + +Same as Option #2, except: +* Push service is deployed on a Kubernetes cluster and exposes an HTTP API that can be used as a sink for Kafka (via kafka-http connector) or accessed directly. + +![With Push Service in Kubernetes](production-kube.png) +
diff --git a/docs/load-data-into-the-online-store.md b/docs/load-data-into-the-online-store.md deleted file mode 100644 index 48bfb27fc4..0000000000 --- a/docs/load-data-into-the-online-store.md +++ /dev/null @@ -1,2 +0,0 @@ -# Load data into the online store - diff --git a/docs/contributing/contributing.md b/docs/project/contributing.md similarity index 86% rename from docs/contributing/contributing.md rename to docs/project/contributing.md index ac1ab480a0..933237f204 100644 --- a/docs/contributing/contributing.md +++ b/docs/project/contributing.md @@ -4,11 +4,8 @@ We use [RFCs](https://en.wikipedia.org/wiki/Request_for_Comments) and [GitHub is We follow a process of [lazy consensus](http://community.apache.org/committers/lazyConsensus.html). If you believe you know what the project needs then just start development. If you are unsure about which direction to take with development then please communicate your ideas through a GitHub issue or through our [Slack Channel](../community.md) before starting development. -Please [submit a PR ](https://github.com/feast-dev/feast/pulls)to the master branch of the Feast repository once you are ready to submit your contribution. Code submission to Feast \(including submission from project maintainers\) require review and approval from maintainers or code owners. +Please [submit a PR](https://github.com/feast-dev/feast/pulls) to the master branch of the Feast repository once you are ready to submit your contribution. Code submission to Feast \(including submission from project maintainers\) requires review and approval from maintainers or code owners. PRs that are submitted by the general public need to be identified as `ok-to-test`. Once enabled, [Prow](https://github.com/kubernetes/test-infra/tree/master/prow) will run a range of tests to verify the submission, after which community members will help to review the pull request. -{% hint style="success" %} -Please sign the [Google CLA](https://cla.developers.google.com/) in order to have your code merged into the Feast repository. -{% endhint %} - +See also [Community](../community.md) for other ways to get involved with the community (e.g. joining community calls) \ No newline at end of file diff --git a/docs/contributing/development-guide.md b/docs/project/development-guide.md similarity index 90% rename from docs/contributing/development-guide.md rename to docs/project/development-guide.md index 8d0de11480..6d5bee16af 100644 --- a/docs/contributing/development-guide.md +++ b/docs/project/development-guide.md @@ -9,11 +9,11 @@ This guide is targeted at developers looking to contribute to Feast: * [Feast Data Storage Format](development-guide.md#feast-data-storage-format) * [Feast Protobuf API](development-guide.md#feast-protobuf-api) -> Learn How the Feast [Contributing Process](https://docs.feast.dev/contributing/contributing) works. +> Learn How the Feast [Contributing Process](contributing.md) works.
## Project Structure -Feast is composed of [multiple components](https://docs.feast.dev/v/master/concepts/architecture#components) distributed into multiple repositories: +Feast is composed of [multiple components](../getting-started/architecture-and-components/) distributed into multiple repositories: @@ -92,11 +92,15 @@ Feast is composed of [multiple components](https://docs.feast.dev/v/master/conce ## Making a Pull Request -#### Incorporating upstream changes from master +{% hint style="info" %} +See also the CONTRIBUTING.md in the corresponding GitHub repository \(e.g. [main repo doc](https://github.com/feast-dev/feast/blob/master/CONTRIBUTING.md)\) +{% endhint %} + +### Incorporating upstream changes from master Our preference is the use of `git rebase` instead of `git merge` : `git pull -r` -#### Signing commits +### Signing commits Commits have to be signed before they are allowed to be merged into the Feast codebase: @@ -105,7 +109,7 @@ Commits have to be signed before they are allowed to be merged into the Feast co git commit -s -m "My first commit" ``` -#### Good practices to keep in mind +### Good practices to keep in mind * Fill in the description based on the default template configured when you first open the PR * What this PR does/why we need it @@ -136,7 +140,7 @@ Feast Protobuf API defines the common API used by Feast's Components: * Feast Protobuf API specifications are written in [proto3](https://developers.google.com/protocol-buffers/docs/proto3) in the Main Feast Repository. * Changes to the API should be proposed via a [GitHub Issue](https://github.com/feast-dev/feast/issues/new/choose) for discussion first. -#### Generating Language Bindings +### Generating Language Bindings The language specific bindings have to be regenerated when changes are made to the Feast Protobuf API: @@ -146,5 +150,3 @@ The language specific bindings have to be regenerated when changes are made to t | [Main Feast Repository](https://github.com/feast-dev/feast) | Golang | Run `make compile-protos-go` to generate bindings | | [Feast Java](https://github.com/feast-dev/feast-java) | Java | No action required: bindings are generated automatically during compilation. | -#### - diff --git a/docs/project/feast-0.9-vs-feast-0.10+.md b/docs/project/feast-0.9-vs-feast-0.10+.md new file mode 100644 index 0000000000..ed2f4ee336 --- /dev/null +++ b/docs/project/feast-0.9-vs-feast-0.10+.md @@ -0,0 +1,65 @@ +# Feast 0.9 vs Feast 0.10+ + +Feast 0.10 brought about major changes to the way Feast is architected and how the software is intended to be deployed, extended, and operated. + +{% hint style="success" %} +Please see [Upgrading from Feast 0.9](https://docs.google.com/document/d/1AOsr_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit#) for a guide on how to upgrade to the latest Feast version. +{% endhint %} + +### Changes introduced in Feast 0.10 + +Feast contributors identified various [design challenges](https://feast.dev/blog/a-state-of-feast/) in Feast 0.9 that made deploying, operating, extending, and maintaining it challenging. These challenges applied both to users and contributors. \ +\ +Our goal is to make ML practitioners immediately productive in operationalizing data for machine learning. 
To that end, Feast 0.10+ made the following improvements on Feast 0.9:
+
+| Challenges in Feast 0.9 (Before) | Changed in Feast 0.10+ (After) |
+| --- | --- |
+| Hard to install because it was a heavy-weight system with many components requiring a lot of configuration | • Easy to install via pip install<br>• Opinionated default configurations<br>• No Helm charts necessary |
+| Engineering support needed to deploy/operate reliably | • Feast moves from a stack of services to a CLI/SDK<br>• No need for Kubernetes or Spark<br>• No long running processes or orchestrators<br>• Leverages globally available managed services where possible |
+| Hard to develop/debug with tightly coupled components, async operations, and hard to debug components like Spark | • Easy to develop and debug<br>• Modular components<br>• Clear extension points<br>• Fewer background operations<br>• Faster feedback<br>• Local mode |
+| Inability to benefit from cloud-native technologies because of focus on reusable technologies like Kubernetes and Spark | • Leverages best-in-class cloud technologies so users can enjoy scalable + powerful tech stacks without managing open source stacks themselves 
| + +### Changes in more detail + +Where Feast 0.9 was a large stack of components that needed to be deployed to Kubernetes, Feast 0.10 is simply a lightweight SDK and CLI. It doesn’t need any long-running processes to operate. This SDK/CLI can deploy and configure your feature store to your infrastructure, and execute workflows like building training datasets or reading features from an online feature store. + +* **Feast 0.10 introduces local mode:** Local mode allows users to try out Feast in a completely local environment (without using any cloud technologies). This provides users with a responsive means of trying out the software before deploying it into a production environment. +* **Feast comes with opinionated defaults:** As much as possible we are attempting to make Feast a batteries-included feature store that removes the need for users to configure infinite configuration options (as with Feast 0.9). Feast 0.10 comes with sane default configuration options to deploy Feast on your infrastructure. +* **Feast Core was replaced by a file-based (S3, GCS) registry:** Feast Core is a metadata server that maintains and exposes an API of feature definitions. With Feast 0.10, we’ve moved this entire service into a single flat file that can be stored on either the local disk or in a central object store like S3 or GCS. The benefit of this change is that users don’t need to maintain a database and a registry service, yet they can still access all the metadata they had before. +* **Materialization is a CLI operation:** Instead of having ingestion jobs be managed by a job service, users can now schedule a batch ingestion job themselves by calling “materialize”. This change was introduced because most teams already have schedulers like Airflow in their organization. By starting ingestion jobs from Airflow, teams are now able to easily track state outside of Feast and to debug failures synchronously. Similarly, streaming ingestion jobs can be launched through the “apply” command +* **Doubling down on data warehouses:** Most modern data teams are doubling down on data warehouses like BigQuery, Snowflake, and Redshift. Feast doubles down on these big data technologies as the primary interfaces through which it launches batch operations (like training dataset generation). This reduces the development burden on Feast contributors (since they only need to reason about SQL), provides users with a more responsive experience, avoids moving data from the warehouse (to compute joins using Spark), and provides a more serverless and scalable experience to users. +* **Temporary loss of streaming support:** Unfortunately, Feast 0.10, 0.11, and 0.12 do not support streaming feature ingestion out of the box. It is entirely possible to launch streaming ingestion jobs using these Feast versions, but it requires the use of a Feast extension point to launch these ingestion jobs. It is still a core design goal for Feast to support streaming ingestion, so this change is in the development backlog for the Feast project. +* **Addition of extension points: **Feast 0.10+ introduces various extension points. Teams can override all feature store behavior by writing (or extending) a provider. It is also possible for teams to add their own data storage connectors for both an offline and online store using a plugin interface that Feast provides. 
+ +### Comparison of architectures + +#### Feast 0.9 + +![](<../.gitbook/assets/image (9).png>) + +#### Feast 0.10, 0.11, and 0.12 architecture + +![](<../.gitbook/assets/image (19).png>) + +#### Feast 1.0 architecture (eventual goal) + +![](<../.gitbook/assets/image (21).png>) + +### Comparison of components + +| Component | Feast 0.9 | Feast 0.10, 011, 0.12+ | +| --------------------------- | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Architecture** |
  • Service-oriented architecture
  • Containers and services deployed to Kubernetes
|
  • SDK/CLI centric software
  • Feast is able to deploy or configure infrastructure for use as a feature store
| +| **Installation** | Terraform and Helm |
  • Pip to install SDK/CLI
  • Provider used to deploy Feast components to GCP, AWS, or other environments during apply
| +| **Required infrastructure** | Kubernetes, Postgres, Spark, Docker, Object Store | None | +| **Batch compute** | Yes (Spark based) |
  • Python native (client-side) for batch data loading
  • Data warehouse for batch compute
+| **Streaming support** | Yes (Spark based) | Planned. Streaming jobs will be launched using _**apply**_ |
+| **Offline store** | None (can source data from any source Spark supports) | BigQuery, Snowflake (planned), Redshift, or custom implementations |
+| **Online store** | Redis | DynamoDB, Firestore, Redis, and more planned. |
+| **Job Manager** | Yes | No |
+| **Registry** | gRPC service with Postgres backend | File-based registry with accompanying SDK for exploration |
+| **Local Mode** | No | Yes |
+
+### Upgrading from Feast 0.9 to the latest Feast
+
+Please see the [Feast 0.9 Upgrade Guide](https://docs.google.com/document/d/1AOsr_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0/edit#).
diff --git a/docs/contributing/release-process.md b/docs/project/release-process.md similarity index 75% rename from docs/contributing/release-process.md rename to docs/project/release-process.md index c5d29b451d..af573c92c7 100644 --- a/docs/contributing/release-process.md +++ b/docs/project/release-process.md @@ -5,7 +5,7 @@ For Feast maintainers, these are the concrete steps for making a new release. 1. For new major or minor release, create and check out the release branch for the new stream, e.g. `v0.6-branch`. For a patch version, check out the stream's release branch. -2. Update the [CHANGELOG.md](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md). See the [Creating a change log](release-process.md#creating-a-change-log) guide and commit +2. Update the [CHANGELOG.md](../../CHANGELOG.md). See the [Creating a change log](release-process.md#creating-a-change-log) guide and commit * Make sure to review each PR in the changelog to [flag any breaking changes and deprecation.](release-process.md#flag-breaking-changes-and-deprecations) 3. Update versions for the release/release candidate with a commit: 1. In the root `pom.xml`, remove `-SNAPSHOT` from the `` property, update versions, and commit. @@ -21,21 +21,20 @@ For Feast maintainers, these are the concrete steps for making a new release. 1. Bump to the next major/minor version and append `-SNAPSHOT` . 2. Add the change log by applying the change log commit created in step 2. 3. Check that versions are updated with `env TARGET_MERGE_BRANCH=master make lint-versions` -7. Create a [GitHub release](https://github.com/feast-dev/feast/releases) which includes a summary of im~~p~~ortant changes as well as any artifacts associated with the release. Make sure to include the same change log as added in [CHANGELOG.md](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md). Use `Feast vX.Y.Z` as the title. -8. Update the[ Upgrade Guide](../feast-on-kubernetes/advanced-1/upgrading.md) to include the action required instructions for users to upgrade to this new release. Instructions should include a migration for each breaking change made to this release. +7. Create a [GitHub release](https://github.com/feast-dev/feast/releases) which includes a summary of important changes as well as any artifacts associated with the release. Make sure to include the same change log as added in [CHANGELOG.md](../../CHANGELOG.md). Use `Feast vX.Y.Z` as the title. -When a tag that matches a Semantic Version string is pushed, CI will automatically build and push the relevant artifacts to their repositories or package managers \(docker images, Python wheels, etc\). JVM artifacts are promoted from Sonatype OSSRH to Maven Central, but it sometimes takes some time for them to be available.
The `sdk/go/v tag` is required to version the Go SDK go module so that users can go get a specific tagged release of the Go SDK. +When a tag that matches a Semantic Version string is pushed, CI will automatically build and push the relevant artifacts to their repositories or package managers (docker images, Python wheels, etc). JVM artifacts are promoted from Sonatype OSSRH to Maven Central, but it sometimes takes some time for them to be available. The `sdk/go/v tag` is required to version the Go SDK go module so that users can go get a specific tagged release of the Go SDK. ### Creating a change log -We use an [open source change log generator](https://hub.docker.com/r/ferrarimarco/github-changelog-generator/) to generate change logs. The process still requires a little bit of manual effort. +We use an [open source change log generator](https://hub.docker.com/r/ferrarimarco/github-changelog-generator/) to generate change logs. The process still requires a little bit of manual effort. -1. Create a GitHub token as [per these instructions](https://github.com/github-changelog-generator/github-changelog-generator#github-token). The token is used as an input argument \(`-t`\) to the change log generator. -2. The change log generator configuration below will look for unreleased changes on a specific branch. The branch will be `master` for a major/minor release, or a release branch \(`v0.4-branch`\) for a patch release. You will need to set the branch using the `--release-branch` argument. +1. Create a GitHub token as [per these instructions](https://github.com/github-changelog-generator/github-changelog-generator#github-token). The token is used as an input argument (`-t`) to the change log generator. +2. The change log generator configuration below will look for unreleased changes on a specific branch. The branch will be `master` for a major/minor release, or a release branch (`v0.4-branch`) for a patch release. You will need to set the branch using the `--release-branch` argument. 3. You should also set the `--future-release` argument. This is the version you are releasing. The version can still be changed at a later date. 4. Update the arguments below and run the command to generate the change log to the console. -```text +``` docker run -it --rm ferrarimarco/github-changelog-generator \ --user feast-dev \ --project feast \ @@ -52,14 +51,13 @@ docker run -it --rm ferrarimarco/github-changelog-generator \ ``` 1. Review each change log item. - * Make sure that sentences are grammatically correct and well formatted \(although we will try to enforce this at the PR review stage\). + * Make sure that sentences are grammatically correct and well formatted (although we will try to enforce this at the PR review stage). * Make sure that each item is categorised correctly. You will see the following categories: `Breaking changes`, `Implemented enhancements`, `Fixed bugs`, and `Merged pull requests`. Any unlabelled PRs will be found in `Merged pull requests`. It's important to make sure that any `breaking changes`, `enhancements`, or `bug fixes` are pulled up out of `merged pull requests` into the correct category. Housekeeping, tech debt clearing, infra changes, or refactoring do not count as `enhancements`. Only enhancements a user benefits from should be listed in that category. - * Make sure that the "Full Change log" link is actually comparing the correct tags \(normally your released version against the previously version\). 
+ * Make sure that the "Full Change log" link is actually comparing the correct tags (normally your released version against the previous version). * Make sure that release notes and breaking changes are present. ### Flag Breaking Changes & Deprecations It's important to flag breaking changes and deprecations to the API for each release so that we can maintain API compatibility. -Developers should have flagged PRs with breaking changes with the `compat/breaking` label. However, it's important to double check each PR's release notes and contents for changes that will break API compatibility and manually label `compat/breaking` to PRs with undeclared breaking changes. The change log will have to be regenerated if any new labels have to be added. - +Developers should have flagged PRs with breaking changes with the `compat/breaking` label. However, it's important to double check each PR's release notes and contents for changes that will break API compatibility and manually label `compat/breaking` to PRs with undeclared breaking changes. The change log will have to be regenerated if any new labels have to be added.
diff --git a/docs/contributing/versioning-policy.md b/docs/project/versioning-policy.md similarity index 94% rename from docs/contributing/versioning-policy.md rename to docs/project/versioning-policy.md index 1fc2c755b0..8e51676355 100644 --- a/docs/contributing/versioning-policy.md +++ b/docs/project/versioning-policy.md @@ -4,7 +4,7 @@ description: Versioning policies and status of Feast components # Versioning policy -### Versioning policy and branch workflow +## Versioning policy and branch workflow Feast uses [semantic versioning](https://semver.org/). @@ -19,9 +19,9 @@ A release branch should be substantially _feature complete_ with respect to the In general, unless you're committing code that only applies to a particular release stream \(for example, temporary hot-fixes, back-ported security fixes, or image hashes\), you should base changes from `master` and then merge or cherry-pick to the release branch. -### Feast Component Matrix +## Feast Component Matrix -The following table shows the **status** \(stable, beta, or alpha\) of Feast components. +The following table shows the **status** \(stable, beta, or alpha\) of Feast components. Application status indicators for Feast: @@ -38,7 +38,7 @@ Application status indicators for Feast: | [Feast Go Client](https://github.com/feast-dev/feast) | Beta | | | [Feast Spark Python SDK](https://github.com/feast-dev/feast-spark) | Alpha | | | [Feast Spark Launchers](https://github.com/feast-dev/feast-spark) | Alpha | | -| [Feast Job Service](https://github.com/feast-dev/feast-spark) | Alpha | At risk of deprecation | +| [Feast Job Service](https://github.com/feast-dev/feast-spark) | Alpha | Scheduled for deprecation | | [Feast Helm Chart](https://github.com/feast-dev/feast-helm-charts) | Beta | | | | | | @@ -59,7 +59,7 @@ Criteria for reaching **beta** status * API reference documentation * Deprecative changes must span multiple minor versions and allow for an upgrade path. -### Levels of support +## Levels of support Feast components have various levels of support based on the component status. @@ -69,7 +69,7 @@ Feast components have various levels of support based on the component status. | Beta | The Feast community offers best-effort support for beta applications. Beta applications will be supported for at least 2 more minor releases.
| | Alpha | The response differs per application in alpha status, depending on the size of the community for that application and the current level of active development of the application. | -### Support from the Feast community +## Support from the Feast community Feast has an active and helpful community of users and contributors. diff --git a/docs/quickstart.md b/docs/quickstart.md deleted file mode 100644 index 866f96f302..0000000000 --- a/docs/quickstart.md +++ /dev/null @@ -1,132 +0,0 @@ -# Quickstart - -In this tutorial we will - -1. Deploy a local feature store with a **Parquet file offline store** and **Sqlite online store**. -2. Build a training dataset using our time series features from our **Parquet files**. -3. Materialize feature values from the offline store into the online store. -4. Read the latest features from the online store for inference. - -## Install Feast - -Install the Feast SDK and CLI using pip: - -```bash -pip install feast -``` - -## Create a feature repository - -Bootstrap a new feature repository using `feast init` from the command line: - -```text -feast init feature_repo -cd feature_repo -``` - -```text -Creating a new Feast repository in /home/Jovyan/feature_repo. -``` - -## Register feature definitions and deploy your feature store - -The `apply` command registers all the objects in your feature repository and deploys a feature store: - -```bash -feast apply -``` - -```text -Registered entity driver_id -Registered feature view driver_hourly_stats -Deploying infrastructure for driver_hourly_stats -``` - -## Generating training data - -The `apply` command builds a training dataset based on the time-series features defined in the feature repository: - -```python -from datetime import datetime - -import pandas as pd - -from feast import FeatureStore - -entity_df = pd.DataFrame.from_dict( - { - "driver_id": [1001, 1002, 1003, 1004], - "event_timestamp": [ - datetime(2021, 4, 12, 10, 59, 42), - datetime(2021, 4, 12, 8, 12, 10), - datetime(2021, 4, 12, 16, 40, 26), - datetime(2021, 4, 12, 15, 1, 12), - ], - } -) - -store = FeatureStore(repo_path=".") - -training_df = store.get_historical_features( - entity_df=entity_df, - feature_refs=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - ], -).to_df() - -print(training_df.head()) -``` - -```bash -event_timestamp driver_id driver_hourly_stats__conv_rate driver_hourly_stats__acc_rate driver_hourly_stats__avg_daily_trips -2021-04-12 1002 0.328245 0.993218 329 -2021-04-12 1001 0.448272 0.873785 767 -2021-04-12 1004 0.822571 0.571790 673 -2021-04-12 1003 0.556326 0.605357 335 -``` - -## Load features into your online store - -The `materialize` command loads the latest feature values from your feature views into your online store: - -```bash -CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S") -feast materialize-incremental $CURRENT_TIME -``` - -## Fetching feature vectors for inference - -```python -from pprint import pprint -from feast import FeatureStore - -store = FeatureStore(repo_path=".") - -feature_vector = store.get_online_features( - feature_refs=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - ], - entity_rows=[{"driver_id": 1001}], -).to_dict() - -pprint(feature_vector) -``` - -```python -{ - 'driver_id': [1001], - 'conv_rate': [0.49274], - 'acc_rate': [0.92743], - 'avg_daily_trips': [72], -} -``` - -## Next steps - -* Follow our [Getting Started](getting-started/) guide for a hands 
tutorial in using Feast -* Join other Feast users and contributors in [Slack](https://slack.feast.dev/) and become part of the community! - diff --git a/docs/read-features-from-the-online-store.md b/docs/read-features-from-the-online-store.md deleted file mode 100644 index db082897a2..0000000000 --- a/docs/read-features-from-the-online-store.md +++ /dev/null @@ -1,2 +0,0 @@ -# Read features from the online store - diff --git a/docs/reference/alpha-aws-lambda-feature-server.md b/docs/reference/alpha-aws-lambda-feature-server.md new file mode 100644 index 0000000000..eadcf40bb4 --- /dev/null +++ b/docs/reference/alpha-aws-lambda-feature-server.md @@ -0,0 +1,205 @@ +# \[Alpha\] AWS Lambda feature server + +**Warning**: This is an _experimental_ feature. It's intended for early testing and feedback, and could change without warnings in future releases. + +{% hint style="info" %} +To enable this feature, run **`feast alpha enable aws_lambda_feature_server`** +{% endhint %} + +## Overview + +The AWS Lambda feature server is an HTTP endpoint that serves features with JSON I/O, deployed as a Docker image through AWS Lambda and AWS API Gateway. This enables users to get features from Feast using any programming language that can make HTTP requests. A [local feature server](feature-servers/python-feature-server.md) is also available. A remote feature server on GCP Cloud Run is currently being developed. + +## Deployment + +The AWS Lambda feature server is only available to projects using the `AwsProvider` with registries on S3. It is disabled by default. To enable it, `feature_store.yaml` must be modified; specifically, the `enable` flag must be on and an `execution_role_name` must be specified. For example, after running `feast init -t aws`, changing the registry to be on S3, and enabling the feature server, the contents of `feature_store.yaml` should look similar to the following: + +```text +project: dev +registry: s3://feast/registries/dev +provider: aws +online_store: + region: us-west-2 +offline_store: + cluster_id: feast + region: us-west-2 + user: admin + database: feast + s3_staging_location: s3://feast/redshift/tests/staging_location + iam_role: arn:aws:iam::{aws_account}:role/redshift_s3_access_role +flags: + alpha_features: true + aws_lambda_feature_server: true +feature_server: + enabled: True + execution_role_name: arn:aws:iam::{aws_account}:role/lambda_execution_role +``` + +If enabled, the feature server will be deployed during `feast apply`. After it is deployed, the `feast endpoint` CLI command will indicate the server's endpoint. + +## Permissions + +Feast requires the following permissions in order to deploy and teardown AWS Lambda feature server: + +| Permissions | Resources | +| ----------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------- | +|

lambda:CreateFunction

lambda:GetFunction

lambda:DeleteFunction

lambda:AddPermission

lambda:UpdateFunctionConfiguration

| arn:aws:lambda:\:\:function:feast-\* | +|

ecr:CreateRepository

ecr:DescribeRepositories

ecr:DeleteRepository

ecr:PutImage

ecr:DescribeImages

ecr:BatchDeleteImage

ecr:CompleteLayerUpload

ecr:UploadLayerPart

ecr:InitiateLayerUpload

ecr:BatchCheckLayerAvailability

ecr:GetDownloadUrlForLayer

ecr:GetRepositoryPolicy

ecr:SetRepositoryPolicy

ecr:GetAuthorizationToken

| \* | +|

iam:PassRole

| arn:aws:iam::\:role/ | +|

apigateway:*

|

arn:aws:apigateway:*::/apis/*/routes/*/routeresponses

arn:aws:apigateway:*::/apis/*/routes/*/routeresponses/*

arn:aws:apigateway:*::/apis/*/routes/*

arn:aws:apigateway:*::/apis/*/routes

arn:aws:apigateway:*::/apis/*/integrations

arn:aws:apigateway:*::/apis/*/stages/*/routesettings/*

arn:aws:apigateway:*::/apis/*

arn:aws:apigateway:*::/apis

+
+The following inline policy can be used to grant Feast the necessary permissions:
+
+```javascript
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": [
+        "lambda:CreateFunction",
+        "lambda:GetFunction",
+        "lambda:DeleteFunction",
+        "lambda:AddPermission",
+        "lambda:UpdateFunctionConfiguration"
+      ],
+      "Resource": "arn:aws:lambda:::function:feast-*"
+    },
+    {
+      "Effect": "Allow",
+      "Action": [
+        "ecr:CreateRepository",
+        "ecr:DescribeRepositories",
+        "ecr:DeleteRepository",
+        "ecr:PutImage",
+        "ecr:DescribeImages",
+        "ecr:BatchDeleteImage",
+        "ecr:CompleteLayerUpload",
+        "ecr:UploadLayerPart",
+        "ecr:InitiateLayerUpload",
+        "ecr:BatchCheckLayerAvailability",
+        "ecr:GetDownloadUrlForLayer",
+        "ecr:GetRepositoryPolicy",
+        "ecr:SetRepositoryPolicy",
+        "ecr:GetAuthorizationToken"
+      ],
+      "Resource": "*"
+    },
+    {
+      "Effect": "Allow",
+      "Action": "iam:PassRole",
+      "Resource": "arn:aws:iam:::role/"
+    },
+    {
+      "Effect": "Allow",
+      "Action": "apigateway:*",
+      "Resource": [
+        "arn:aws:apigateway:*::/apis/*/routes/*/routeresponses",
+        "arn:aws:apigateway:*::/apis/*/routes/*/routeresponses/*",
+        "arn:aws:apigateway:*::/apis/*/routes/*",
+        "arn:aws:apigateway:*::/apis/*/routes",
+        "arn:aws:apigateway:*::/apis/*/integrations",
+        "arn:aws:apigateway:*::/apis/*/stages/*/routesettings/*",
+        "arn:aws:apigateway:*::/apis/*",
+        "arn:aws:apigateway:*::/apis"
+      ]
+    }
+  ]
+}
+```
+
+## Example
+
+After `feature_store.yaml` has been modified as described in the previous section, it can be deployed as follows:
```bash
$ feast apply
10/07/2021 03:57:26 PM INFO:Pulling remote image feastdev/feature-server-python-aws:aws:
10/07/2021 03:57:28 PM INFO:Creating remote ECR repository feast-python-server-key_shark-0_13_1_dev23_gb3c08320:
10/07/2021 03:57:29 PM INFO:Pushing local image to remote 402087665549.dkr.ecr.us-west-2.amazonaws.com/feast-python-server-key_shark-0_13_1_dev23_gb3c08320:0_13_1_dev23_gb3c08320:
10/07/2021 03:58:44 PM INFO:Deploying feature server...
10/07/2021 03:58:45 PM INFO:  Creating AWS Lambda...
10/07/2021 03:58:46 PM INFO:  Creating AWS API Gateway...
Registered entity driver_id
Registered feature view driver_hourly_stats
Deploying infrastructure for driver_hourly_stats

$ feast endpoint
10/07/2021 03:59:01 PM INFO:Feature server endpoint: https://hkosgmz4m2.execute-api.us-west-2.amazonaws.com

$ feast materialize-incremental $(date +%Y-%m-%d)
Materializing 1 feature views to 2021-10-06 17:00:00-07:00 into the dynamodb online store.
driver_hourly_stats from 2020-10-08 23:01:34-07:00 to 2021-10-06 17:00:00-07:00:
100%|█████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 16.89it/s]
```
+
+After the feature server starts, we can execute cURL commands against it:
```bash
$ curl -X POST \
    "https://hkosgmz4m2.execute-api.us-west-2.amazonaws.com/get-online-features" \
    -H "Content-type: application/json" \
    -H "Accept: application/json" \
    -d '{
        "features": [
            "driver_hourly_stats:conv_rate",
            "driver_hourly_stats:acc_rate",
            "driver_hourly_stats:avg_daily_trips"
        ],
        "entities": {
            "driver_id": [1001, 1002, 1003]
        },
        "full_feature_names": true
    }' | jq
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100  1346  100  1055  100   291   3436    947 --:--:-- --:--:-- --:--:--  4370
{
  "field_values": [
    {
      "fields": {
        "driver_id": 1001,
        "driver_hourly_stats__conv_rate": 0.025330161675810814,
        "driver_hourly_stats__avg_daily_trips": 785,
        "driver_hourly_stats__acc_rate": 0.835975170135498
      },
      "statuses": {
        "driver_hourly_stats__avg_daily_trips": "PRESENT",
        "driver_id": "PRESENT",
        "driver_hourly_stats__conv_rate": "PRESENT",
        "driver_hourly_stats__acc_rate": "PRESENT"
      }
    },
    {
      "fields": {
        "driver_hourly_stats__conv_rate": 0.7595187425613403,
        "driver_hourly_stats__acc_rate": 0.1740121990442276,
        "driver_id": 1002,
        "driver_hourly_stats__avg_daily_trips": 875
      },
      "statuses": {
        "driver_hourly_stats__acc_rate": "PRESENT",
        "driver_id": "PRESENT",
        "driver_hourly_stats__avg_daily_trips": "PRESENT",
        "driver_hourly_stats__conv_rate": "PRESENT"
      }
    },
    {
      "fields": {
        "driver_hourly_stats__acc_rate": 0.7785481214523315,
        "driver_hourly_stats__conv_rate": 0.33832859992980957,
        "driver_hourly_stats__avg_daily_trips": 846,
        "driver_id": 1003
      },
      "statuses": {
        "driver_id": "PRESENT",
        "driver_hourly_stats__conv_rate": "PRESENT",
        "driver_hourly_stats__acc_rate": "PRESENT",
        "driver_hourly_stats__avg_daily_trips": "PRESENT"
      }
    }
  ]
}
```
diff --git a/docs/reference/alpha-on-demand-feature-view.md b/docs/reference/alpha-on-demand-feature-view.md new file mode 100644 index 0000000000..6be55fb735 --- /dev/null +++ b/docs/reference/alpha-on-demand-feature-view.md @@ -0,0 +1,83 @@ +# \[Alpha\] On demand feature view + +**Warning**: This is an _experimental_ feature. It's intended for early testing and feedback, and could change without warnings in future releases. + +{% hint style="info" %} +To enable this feature, run **`feast alpha enable on_demand_transforms`** +{% endhint %} + +## Overview + +On demand feature views allow users to use existing features and request time data \(features only available at request time\) to transform and create new features. Users define Python transformation logic which is executed in both historical retrieval and online retrieval paths. + +Currently, these transformations are executed locally. Future milestones include building a Feature Transformation Server for executing transformations at higher scale.
+ +## CLI + +There are new CLI commands: + +* `feast on-demand-feature-views list` lists all registered on demand feature views after `feast apply` is run +* `feast on-demand-feature-views describe [NAME]` describes the definition of an on demand feature view + +## Example + +See [https://github.com/feast-dev/on-demand-feature-views-demo](https://github.com/feast-dev/on-demand-feature-views-demo) for an example on how to use on demand feature views. + +### **Registering transformations** + +We register `RequestSource` inputs and the transform in `on_demand_feature_view`:
```python
from feast import Field, RequestSource, on_demand_feature_view
from feast.types import Float64, Int64
import pandas as pd

# Define a request data source which encodes features / information only
# available at request time (e.g. part of the user initiated HTTP request)
input_request = RequestSource(
    name="vals_to_add",
    schema=[
        Field(name='val_to_add', dtype=Int64),
        Field(name='val_to_add_2', dtype=Int64)
    ]
)

# Use the input data and feature view features to create new features
@on_demand_feature_view(
    sources={
        'driver_hourly_stats': driver_hourly_stats_view,
        'vals_to_add': input_request
    },
    schema=[
        Field(name='conv_rate_plus_val1', dtype=Float64),
        Field(name='conv_rate_plus_val2', dtype=Float64)
    ]
)
def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame:
    df = pd.DataFrame()
    df['conv_rate_plus_val1'] = (features_df['conv_rate'] + features_df['val_to_add'])
    df['conv_rate_plus_val2'] = (features_df['conv_rate'] + features_df['val_to_add_2'])
    return df
```
### **Feature retrieval** + +{% hint style="info" %} +The on demand feature view's name is the function name \(i.e. `transformed_conv_rate`\). +{% endhint %} + +And then to retrieve historical or online features, we can call this in a feature service or reference individual features:
```python
training_df = store.get_historical_features(
    entity_df=entity_df,
    features=[
        "driver_hourly_stats:conv_rate",
        "driver_hourly_stats:acc_rate",
        "driver_hourly_stats:avg_daily_trips",
        "transformed_conv_rate:conv_rate_plus_val1",
        "transformed_conv_rate:conv_rate_plus_val2",
    ],
).to_df()
```
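+
+For the online path, values for the `RequestSource` fields are passed in `entity_rows` alongside the entity join key, since they are only known at request time. A minimal sketch (the driver id and the values to add are illustrative):
+
+```python
+online_response = store.get_online_features(
+    features=[
+        "driver_hourly_stats:conv_rate",
+        "transformed_conv_rate:conv_rate_plus_val1",
+        "transformed_conv_rate:conv_rate_plus_val2",
+    ],
+    entity_rows=[
+        # join key plus the request-time inputs declared on the RequestSource
+        {"driver_id": 1001, "val_to_add": 1000, "val_to_add_2": 2000},
+    ],
+).to_dict()
+```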
The SDK is used to manage feature sets, features, jobs, projects, and entities. It can also be used to retrieve training datasets or online features from Feast Serving. - -## Community Contributions - -The following community provided SDKs are available: - -* [Node.js SDK](https://github.com/MichaelHirn/feast-client/): A Node.js SDK written in TypeScript. The SDK can be used to manage feature sets, features, jobs, projects, and entities. - diff --git a/docs/reference/api/README.md b/docs/reference/api/README.md deleted file mode 100644 index cd75f5bf88..0000000000 --- a/docs/reference/api/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# API Reference - -Please see the following API specific reference documentation: - -* [Feast Core gRPC API](https://api.docs.feast.dev/grpc/feast/core/coreservice.pb.html): This is the gRPC API used by Feast Core. This API contains RPCs for creating and managing feature sets, stores, projects, and jobs. -* [Feast Serving gRPC API](https://api.docs.feast.dev/grpc/feast/serving/servingservice.pb.html): This is the gRPC API used by Feast Serving. It contains RPCs used for the retrieval of online feature data or historical feature data. -* [Feast gRPC Types](https://api.docs.feast.dev/grpc/feast/types/value.pb): These are the gRPC types used by both Feast Core, Feast Serving, and the Go, Java, and Python clients. -* [Go Client SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go): The Go library used for the retrieval of online features from Feast. -* [Java Client SDK](https://javadoc.io/doc/dev.feast/feast-sdk): The Java library used for the retrieval of online features from Feast. -* [Python SDK](https://api.docs.feast.dev/python/): This is the complete reference to the Feast Python SDK. The SDK is used to manage feature sets, features, jobs, projects, and entities. It can also be used to retrieve training datasets or online features from Feast Serving. - -## Community Contributions - -The following community provided SDKs are available: - -* [Node.js SDK](https://github.com/MichaelHirn/feast-client/): A Node.js SDK written in TypeScript. The SDK can be used to manage feature sets, features, jobs, projects, and entities. - diff --git a/docs/reference/configuration-reference.md b/docs/reference/configuration-reference.md deleted file mode 100644 index 6f9a97dabf..0000000000 --- a/docs/reference/configuration-reference.md +++ /dev/null @@ -1,132 +0,0 @@ -# Configuration Reference - -## Overview - -This reference describes how to configure Feast components: - -* [Feast Core and Feast Online Serving](configuration-reference.md#2-feast-core-serving-and-job-controller) -* [Feast CLI and Feast Python SDK](configuration-reference.md#3-feast-cli-and-feast-python-sdk) -* [Feast Go and Feast Java SDK](configuration-reference.md#4-feast-java-and-go-sdk) - -## 1. 
Feast Core and Feast Online Serving - -Available configuration properties for Feast Core and Feast Online Serving can be referenced from the corresponding `application.yml` of each component: - -| Component | Configuration Reference | -| :--- | :--- | -| Core | [core/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/core/src/main/resources/application.yml) | -| Serving \(Online\) | [serving/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml) | - -Configuration properties for Feast Core and Feast Online Serving are defined depending on Feast is deployed: - -* [Docker Compose deployment](configuration-reference.md#docker-compose-deployment) - Feast is deployed with Docker Compose. -* [Kubernetes deployment](configuration-reference.md#kubernetes-deployment) - Feast is deployed with Kubernetes. -* [Direct Configuration](configuration-reference.md#direct-configuration) - Feast is built and run from source code. - -## Docker Compose Deployment - -For each Feast component deployed using Docker Compose, configuration properties from `application.yml` can be set at: - -| Component | Configuration Path | -| :--- | :--- | -| Core | `infra/docker-compose/core/core.yml` | -| Online Serving | `infra/docker-compose/serving/online-serving.yml` | - -## Kubernetes Deployment - -The Kubernetes Feast Deployment is configured using `values.yaml` in the [Helm chart](https://github.com/feast-dev/feast-helm-charts) included with Feast: - -```yaml -# values.yaml -feast-core: - enabled: true # whether to deploy the feast-core subchart to deploy Feast Core. - # feast-core subchart specific config. - gcpServiceAccount: - enabled: true - # .... -``` - -A reference of the sub-chart-specific configuration can found in its `values.yml`: - -* [feast-core](https://github.com/feast-dev/feast-java/tree/master/infra/charts/feast-core) -* [feast-serving](https://github.com/feast-dev/feast-java/tree/master/infra/charts/feast-serving) - -Configuration properties can be set via `application-override.yaml` for each component in `values.yaml`: - -```yaml -# values.yaml -feast-core: - # .... - application-override.yaml: - # application.yml config properties for Feast Core. - # ... -``` - -Visit the [Helm chart](https://github.com/feast-dev/feast-helm-charts) included with Feast to learn more about configuration. - -## Direct Configuration - -If Feast is built and running from source, configuration properties can be set directly in the Feast component's `application.yml`: - -| Component | Configuration Path | -| :--- | :--- | -| Core | [core/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/core/src/main/resources/application.yml) | -| Serving \(Online\) | [serving/src/main/resources/application.yml](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml) | - -## 2. Feast CLI and Feast Python SDK - -Configuration options for both the [Feast CLI](../getting-started/connect-to-feast/feast-cli.md) and [Feast Python SDK](https://api.docs.feast.dev/python/) can be defined in the following locations, in order of precedence: - -**1. Command line arguments or initialized arguments:** Passing parameters to the Feast CLI or instantiating the Feast Client object with specific parameters will take precedence above other parameters. - -```bash -# Set option as command line arguments. 
-feast config set core_url "localhost:6565" -``` - -```python -# Pass options as initialized arguments. -client = Client( - core_url="localhost:6565", - project="default" -) -``` - -**2. Environmental variables:** Environmental variables can be set to provide configuration options. They must be prefixed with `FEAST_`. For example `FEAST_CORE_URL`. - -```bash -FEAST_CORE_URL=my_feast:6565 FEAST_PROJECT=default feast projects list -``` - -**3. Configuration file:** Options with the lowest precedence are configured in the Feast configuration file. Feast looks for or creates this configuration file in `~/.feast/config` if it does not already exist. All options must be defined in the `[general]` section of this file. - -```text -[general] -project = default -core_url = localhost:6565 -``` - -Visit the [available configuration parameters](https://api.docs.feast.dev/python/#module-feast.constants) for Feast Python SDK and Feast CLI to learn more. - -## 3. Feast Java and Go SDK - -The [Feast Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) and [Feast Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) are configured via arguments passed when instantiating the respective Clients: - -### Go SDK - -```go -// configure serving host and port. -cli := feast.NewGrpcClient("localhost", 6566) -``` - -Visit the[ Feast Go SDK API reference](https://godoc.org/github.com/feast-dev/feast/sdk/go) to learn more about available configuration parameters. - -### Java SDK - -```java -// configure serving host and port. -client = FeastClient.create(servingHost, servingPort); -``` - -Visit the [Feast Java SDK API reference](https://javadoc.io/doc/dev.feast/feast-sdk/latest/com/gojek/feast/package-summary.html) to learn more about available configuration parameters. - diff --git a/docs/reference/data-sources/README.md b/docs/reference/data-sources/README.md index ef4fcaa33b..43adf0d593 100644 --- a/docs/reference/data-sources/README.md +++ b/docs/reference/data-sources/README.md @@ -1,10 +1,27 @@ # Data sources -Please see [Data Source](../../concepts/feature-view.md#data-source) for an explanation of data sources. +Please see [Data Source](../../getting-started/concepts/feature-view.md#data-source) for an explanation of data sources. -{% page-ref page="bigquery.md" %} +{% content-ref url="file.md" %} +[file.md](file.md) +{% endcontent-ref %} -{% page-ref page="file.md" %} +{% content-ref url="snowflake.md" %} +[snowflake.md](snowflake.md) +{% endcontent-ref %} +{% content-ref url="bigquery.md" %} +[bigquery.md](bigquery.md) +{% endcontent-ref %} +{% content-ref url="redshift.md" %} +[redshift.md](redshift.md) +{% endcontent-ref %} +{% content-ref url="spark.md" %} +[spark.md](spark.md) +{% endcontent-ref %} + +{% content-ref url="push.md" %} +[push.md](push.md) +{% endcontent-ref %} diff --git a/docs/reference/data-sources/bigquery.md b/docs/reference/data-sources/bigquery.md index 0d6e80424c..47eb9b1bf6 100644 --- a/docs/reference/data-sources/bigquery.md +++ b/docs/reference/data-sources/bigquery.md @@ -1,13 +1,13 @@ # BigQuery -### Description +## Description BigQuery data sources allow for the retrieval of historical feature values from BigQuery for building training datasets as well as materializing features into an online store. * Either a table reference or a SQL query can be provided. * No performance guarantees can be provided over SQL query-based sources. Please use table references where possible. 
-### Examples +## Examples Using a table reference diff --git a/docs/reference/data-sources/file.md b/docs/reference/data-sources/file.md index f009f5be35..12e6529840 100644 --- a/docs/reference/data-sources/file.md +++ b/docs/reference/data-sources/file.md @@ -1,10 +1,14 @@ # File -### Description +## Description File data sources allow for the retrieval of historical feature values from files on disk for building training datasets, as well as for materializing features into an online store. -### Example +{% hint style="warning" %} +FileSource is meant for development purposes only and is not optimized for production use. +{% endhint %} + +## Example ```python from feast import FileSource @@ -12,7 +16,7 @@ from feast.data_format import ParquetFormat parquet_file_source = FileSource( file_format=ParquetFormat(), - file_url="file:///feast/customer.parquet", + path="file:///feast/customer.parquet", ) ``` diff --git a/docs/reference/data-sources/push.md b/docs/reference/data-sources/push.md new file mode 100644 index 0000000000..e6eff312ec --- /dev/null +++ b/docs/reference/data-sources/push.md @@ -0,0 +1,58 @@ +# Push source + +**Warning**: This is an _experimental_ feature. It's intended for early testing and feedback, and could change without warnings in future releases. + +## Description + +Push sources allow feature values to be pushed to the online store in real time. This allows fresh feature values to be made available to applications. Push sources supercede the +[FeatureStore.write_to_online_store](https://rtd.feast.dev/en/latest/index.html#feast.feature_store.FeatureStore.write_to_online_store). + +Push sources can be used by multiple feature views. When data is pushed to a push source, Feast propagates the feature values to all the consuming feature views. + +Push sources must have a batch source specified, since that's the source used when retrieving historical features. +When using a PushSource as a stream source in the definition of a feature view, a batch source doesn't need to be specified in the definition explicitly. + +## Stream sources +Streaming data sources are important sources of feature values. A typical setup with streaming data looks like: + +1. Raw events come in (stream 1) +2. Streaming transformations applied (e.g. generating features like `last_N_purchased_categories`) (stream 2) +3. Write stream 2 values to an offline store as a historical log for training +4. Write stream 2 values to an online store for low latency feature serving +5. Periodically materialize feature values from the offline store into the online store for improved correctness + +Feast now allows users to push features previously registered in a feature view to the online store for fresher features. + +## Example +### Defining a push source +Note that the push schema needs to also include the entity + +```python +from feast import PushSource, ValueType, BigQuerySource, FeatureView, Feature, Field +from feast.types import Int64 + +push_source = PushSource( + name="push_source", + batch_source=BigQuerySource(table="test.test"), +) + +fv = FeatureView( + name="feature view", + entities=["user_id"], + schema=[Field(name="life_time_value", dtype=Int64)], + source=push_source, +) +``` + +### Pushing data +```python +from feast import FeatureStore +import pandas as pd + +fs = FeatureStore(...) 
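+# Hypothetical illustration: the pushed dataframe must match the push source's
+# schema, i.e. contain the entity key, timestamp columns, and feature columns.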
+feature_data_frame = pd.DataFrame()
+fs.push("push_source_name", feature_data_frame)
+```
+
+See also [Python feature server](../feature-servers/python-feature-server.md) for instructions on how to push data to a deployed feature server.
+
diff --git a/docs/reference/data-sources/redshift.md b/docs/reference/data-sources/redshift.md
new file mode 100644
index 0000000000..7f50c64d02
--- /dev/null
+++ b/docs/reference/data-sources/redshift.md
@@ -0,0 +1,34 @@
+# Redshift
+
+## Description
+
+Redshift data sources allow for the retrieval of historical feature values from Redshift for building training datasets as well as materializing features into an online store.
+
+* Either a table name or a SQL query can be provided.
+* No performance guarantees can be provided over SQL query-based sources. Please use table references where possible.
+
+## Examples
+
+Using a table name
+
+```python
+from feast import RedshiftSource
+
+my_redshift_source = RedshiftSource(
+    table="redshift_table",
+)
+```
+
+Using a query
+
+```python
+from feast import RedshiftSource
+
+my_redshift_source = RedshiftSource(
+    query="SELECT timestamp as ts, created, f1, f2 "
+          "FROM redshift_table",
+)
+```
+
+Configuration options are available [here](https://rtd.feast.dev/en/master/feast.html?#feast.RedshiftSource).
+
diff --git a/docs/reference/data-sources/snowflake.md b/docs/reference/data-sources/snowflake.md
new file mode 100644
index 0000000000..0f5304b6cd
--- /dev/null
+++ b/docs/reference/data-sources/snowflake.md
@@ -0,0 +1,44 @@
+# Snowflake
+
+## Description
+
+Snowflake data sources allow for the retrieval of historical feature values from Snowflake for building training datasets as well as materializing features into an online store.
+
+* Either a table reference or a SQL query can be provided.
+
+## Examples
+
+Using a table reference
+
+```python
+from feast import SnowflakeSource
+
+my_snowflake_source = SnowflakeSource(
+    database="FEAST",
+    schema="PUBLIC",
+    table="FEATURE_TABLE",
+)
+```
+
+Using a query
+
+```python
+from feast import SnowflakeSource
+
+my_snowflake_source = SnowflakeSource(
+    query="""
+    SELECT
+        timestamp_column AS "ts",
+        "created",
+        "f1",
+        "f2"
+    FROM
+        FEAST.PUBLIC.FEATURE_TABLE
+    """,
+)
+```
+
+One thing to remember is how Snowflake handles table and column name conventions; you can read more about quoted identifiers [here](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html).
+
+Configuration options are available [here](https://rtd.feast.dev/en/latest/index.html#feast.data_source.SnowflakeSource).
diff --git a/docs/reference/data-sources/spark.md b/docs/reference/data-sources/spark.md
new file mode 100644
index 0000000000..2c1d1ec879
--- /dev/null
+++ b/docs/reference/data-sources/spark.md
@@ -0,0 +1,51 @@
+# Spark
+
+## Description
+
+**NOTE**: The Spark data source API is currently in alpha development and is not completely stable. The API may change or update in the future.
+
+The Spark data source API allows for the retrieval of historical feature values from file/database sources for building training datasets as well as materializing features into an online store.
+
+* Either a table name, a SQL query, or a file path can be provided.
+ +## Examples + +Using a table reference from SparkSession(for example, either in memory or a Hive Metastore) + +```python +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SparkSource, +) + +my_spark_source = SparkSource( + table="FEATURE_TABLE", +) +``` + +Using a query + +```python +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SparkSource, +) + +my_spark_source = SparkSource( + query="SELECT timestamp as ts, created, f1, f2 " + "FROM spark_table", +) +``` + +Using a file reference + +```python +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SparkSource, +) + +my_spark_source = SparkSource( + path=f"{CURRENT_DIR}/data/driver_hourly_stats", + file_format="parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) +``` diff --git a/docs/reference/dqm.md b/docs/reference/dqm.md new file mode 100644 index 0000000000..5a02413e53 --- /dev/null +++ b/docs/reference/dqm.md @@ -0,0 +1,77 @@ +# Data Quality Monitoring + +Data Quality Monitoring (DQM) is a Feast module aimed to help users to validate their data with the user-curated set of rules. +Validation could be applied during: +* Historical retrieval (training dataset generation) +* [planned] Writing features into an online store +* [planned] Reading features from an online store + +Its goal is to address several complex data problems, namely: +* Data consistency - new training datasets can be significantly different from previous datasets. This might require a change in model architecture. +* Issues/bugs in the upstream pipeline - bugs in upstream pipelines can cause invalid values to overwrite existing valid values in an online store. +* Training/serving skew - distribution shift could significantly decrease the performance of the model. + +> To monitor data quality, we check that the characteristics of the tested dataset (aka the tested dataset's profile) are "equivalent" to the characteristics of the reference dataset. +> How exactly profile equivalency should be measured is up to the user. + +### Overview + +The validation process consists of the following steps: +1. User prepares reference dataset (currently only [saved datasets](../getting-started/concepts/dataset.md) from historical retrieval are supported). +2. User defines profiler function, which should produce profile by given dataset (currently only profilers based on [Great Expectations](https://docs.greatexpectations.io) are allowed). +3. Validation of tested dataset is performed with reference dataset and profiler provided as parameters. + +### Preparations +Feast with Great Expectations support can be installed via +```shell +pip install 'feast[ge]' +``` + +### Dataset profile +Currently, Feast supports only [Great Expectation's](https://greatexpectations.io/) [ExpectationSuite](https://legacy.docs.greatexpectations.io/en/latest/autoapi/great_expectations/core/expectation_suite/index.html#great_expectations.core.expectation_suite.ExpectationSuite) +as dataset's profile. Hence, the user needs to define a function (profiler) that would receive a dataset and return an [ExpectationSuite](https://legacy.docs.greatexpectations.io/en/latest/autoapi/great_expectations/core/expectation_suite/index.html#great_expectations.core.expectation_suite.ExpectationSuite). 
+
+Great Expectations supports automatic profiling as well as manually specified expectations:
+```python
+from great_expectations.dataset import Dataset
+from great_expectations.core.expectation_suite import ExpectationSuite
+
+from feast.dqm.profilers.ge_profiler import ge_profiler
+
+@ge_profiler
+def automatic_profiler(dataset: Dataset) -> ExpectationSuite:
+    from great_expectations.profile.user_configurable_profiler import UserConfigurableProfiler
+
+    return UserConfigurableProfiler(
+        profile_dataset=dataset,
+        ignored_columns=['conv_rate'],
+        value_set_threshold='few'
+    ).build_suite()
+```
+However, in our experience the capabilities of the automatic profiler are quite limited, so we recommend crafting your own expectations:
+```python
+@ge_profiler
+def manual_profiler(dataset: Dataset) -> ExpectationSuite:
+    dataset.expect_column_max_to_be_between("column", 1, 2)
+    return dataset.get_expectation_suite()
+```
+
+### Validating Training Dataset
+During retrieval of historical features, `validation_reference` can be passed as a parameter to the `.to_df(validation_reference=...)` or `.to_arrow(validation_reference=...)` methods of RetrievalJob.
+If this parameter is provided, Feast will run validation once the dataset is materialized. If validation succeeds, the materialized dataset is returned.
+Otherwise, a `feast.dqm.errors.ValidationFailed` exception is raised, containing the details of all expectations that did not pass.
+
+```python
+from feast import FeatureStore
+
+fs = FeatureStore(".")
+
+job = fs.get_historical_features(...)
+job.to_df(
+    validation_reference=fs
+        .get_saved_dataset("my_reference_dataset")
+        .as_reference(profiler=manual_profiler)
+)
+```
diff --git a/docs/reference/feast-and-spark.md b/docs/reference/feast-and-spark.md
deleted file mode 100644
index be05f177ae..0000000000
--- a/docs/reference/feast-and-spark.md
+++ /dev/null
@@ -1,83 +0,0 @@
----
-description: Configuring Feast to use Spark for ingestion.
----
-
-# Feast and Spark
-
-Feast relies on Spark to ingest data from the offline store to the online store, for streaming ingestion, and for running queries to retrieve historical data from the offline store. Feast supports several Spark deployment options.
-
-## Option 1. Use Kubernetes Operator for Apache Spark
-
-To install the Spark on K8s Operator
-
-```bash
-helm repo add spark-operator \
-    https://googlecloudplatform.github.io/spark-on-k8s-operator
-
-helm install my-release spark-operator/spark-operator \
-    --set serviceAccounts.spark.name=spark
-```
-
-Currently Feast is tested using the `v1beta2-1.1.2-2.4.5` version of the operator image. To configure Feast to use it, set the following options in Feast config:
-
-| Feast Setting | Value |
-| :--- | :--- |
-| `SPARK_LAUNCHER` | `"k8s"` |
-| `SPARK_STAGING_LOCATION` | S3/GCS/Azure Blob Storage URL to use as a staging location, must be readable and writable by Feast. For S3, use `s3a://` prefix here. Ex.: `s3a://some-bucket/some-prefix/artifacts/` |
-| `HISTORICAL_FEATURE_OUTPUT_LOCATION` | S3/GCS/Azure Blob Storage URL used to store results of historical retrieval queries, must be readable and writable by Feast. For S3, use `s3a://` prefix here. Ex.: `s3a://some-bucket/some-prefix/out/` |
-| `SPARK_K8S_NAMESPACE` | Only needs to be set if you are customizing the spark-on-k8s-operator. The name of the Kubernetes namespace to run Spark jobs in. This should match the value of `sparkJobNamespace` set on the spark-on-k8s-operator Helm chart. Typically this is also the namespace Feast itself will run in. |
-| `SPARK_K8S_JOB_TEMPLATE_PATH` | Only needs to be set if you are customizing the Spark job template. Local file path with the template of the SparkApplication resource. No prefix required. Ex.: `/home/jovyan/work/sparkapp-template.yaml`. An example template is [here](https://github.com/feast-dev/feast/blob/4059a21dc4eba9cd27b2d5b0fabe476c07a8b3bd/sdk/python/feast/pyspark/launchers/k8s/k8s_utils.py#L280-L317) and the spec is defined in the [k8s-operator User Guide](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/master/docs/user-guide.md). |
-
-Lastly, make sure that the service account used by Feast has permissions to manage Spark Application resources. This depends on your k8s setup, but typically you'd need to configure a Role and a RoleBinding.
diff --git a/docs/reference/feature-repository/feastignore.md b/docs/reference/feature-repository/feastignore.md
+| Pattern | Example matches | Explanation |
+| :--- | :--- | :--- |
+| venv | venv/foo.py<br>venv/a/foo.py | You can specify a path to a specific directory. Everything in that directory will be ignored. |
+| scripts/foo.py | scripts/foo.py | You can specify a path to a specific file. Only that file will be ignored. |
+| scripts/\*.py | scripts/foo.py<br>scripts/bar.py | You can specify an asterisk (\*) anywhere in the expression. An asterisk matches zero or more characters, except "/". |
+| scripts/\*\*/foo.py | scripts/foo.py<br>scripts/a/foo.py<br>scripts/a/b/foo.py
| You can specify a double asterisk (\*\*) anywhere in the expression. A double asterisk matches zero or more directories. | diff --git a/docs/reference/feature-repository/feature-store-yaml.md b/docs/reference/feature-repository/feature-store-yaml.md index 0f8be6654e..fa10149cfe 100644 --- a/docs/reference/feature-repository/feature-store-yaml.md +++ b/docs/reference/feature-repository/feature-store-yaml.md @@ -1,10 +1,10 @@ -# feature\_store.yaml +# feature_store.yaml ## Overview `feature_store.yaml` is used to configure a feature store. The file must be located at the root of a [feature repository](./). An example `feature_store.yaml` is shown below: -{% code title="feature\_store.yaml" %} +{% code title="feature_store.yaml" %} ```yaml project: loyal_spider registry: data/registry.db @@ -21,9 +21,8 @@ The following top-level configuration options exist in the `feature_store.yaml` * **provider** — Configures the environment in which Feast will deploy and operate. * **registry** — Configures the location of the feature registry. -* **online\_store** — Configures the online store. -* **offline\_store** — Configures the offline store. -* **project** — Defines a namespace for the entire feature store. Can be used to isolate multiple deployments in a single installation of Feast. +* **online_store** — Configures the online store. +* **offline_store** — Configures the offline store. +* **project** — Defines a namespace for the entire feature store. Can be used to isolate multiple deployments in a single installation of Feast. Should only contain letters, numbers, and underscores. Please see the [RepoConfig](https://rtd.feast.dev/en/latest/#feast.repo_config.RepoConfig) API reference for the full list of configuration options. - diff --git a/docs/reference/feature-repository/registration-inferencing.md b/docs/reference/feature-repository/registration-inferencing.md new file mode 100644 index 0000000000..84faf949e1 --- /dev/null +++ b/docs/reference/feature-repository/registration-inferencing.md @@ -0,0 +1,7 @@ +# Registration Inferencing + +## Overview + +* FeatureView - When the `features` parameter is left out of the feature view definition, upon a `feast apply` call, Feast will automatically consider every column in the data source as a feature to be registered other than the specific timestamp columns associated with the underlying data source definition (e.g. timestamp_field) and the columns associated with the feature view's entities. +* DataSource - When the `timestamp_field` parameter is left out of the data source definition, upon a 'feast apply' call, Feast will automatically find the sole timestamp column in the table underlying the data source and use that as the `timestamp_field`. If there are no columns of timestamp type or multiple columns of timestamp type, `feast apply` will throw an exception. +* Entity - When the `value_type` parameter is left out of the entity definition, upon a `feast apply` call, Feast will automatically find the column corresponding with the entity's `join_key` and take that column's data type to be the `value_type`. If the column doesn't exist, `feast apply` will throw an exception. diff --git a/docs/reference/feature-servers/README.md b/docs/reference/feature-servers/README.md new file mode 100644 index 0000000000..301cea372c --- /dev/null +++ b/docs/reference/feature-servers/README.md @@ -0,0 +1,5 @@ +# Feature servers + +Feast users can choose to retrieve features from a feature server, as opposed to through the Python SDK. 
+
+{% page-ref page="python-feature-server.md" %}
diff --git a/docs/reference/feature-servers/go-feature-retrieval.md b/docs/reference/feature-servers/go-feature-retrieval.md
new file mode 100644
index 0000000000..30c1a9ca72
--- /dev/null
+++ b/docs/reference/feature-servers/go-feature-retrieval.md
@@ -0,0 +1,43 @@
+# Go-based Feature Retrieval
+
+## Overview
+
+The Go Feature Retrieval component is a Go implementation of the core feature serving logic, embedded in the Python SDK. It supports retrieval of feature references, feature services, and on demand feature views, and can be used either through the Python SDK or the [Python feature server](python-feature-server.md).
+
+Currently, this component only supports online serving: it has no offline component, no APIs to create Feast feature repositories or apply configuration to the registry, and no dedicated CLI for Feast actions. It is only meant to expose an online serving API that can be called through the Python SDK to facilitate faster online feature retrieval.
+
+The Go Feature Retrieval component currently only supports Redis and SQLite as online stores; support for other online stores will be added soon. Initial benchmarks indicate that it is significantly faster than the Python feature server for online feature retrieval, and we plan to release a more comprehensive set of benchmarks. For more details, see the [RFC](https://docs.google.com/document/d/1Lgqv6eWYFJgQ7LA_jNeTh8NzOPhqI9kGTeyESRpNHnE).
+
+## Installation
+
+As long as you are running macOS or Linux on x86 with Python 3.7-3.10, the Go component comes pre-compiled when you install Feast.
+
+However, some additional dependencies are required for Go <-> Python interoperability. To install these dependencies, run the following command in your console:
+```
+pip install 'feast[go]'
+```
+
+For developers who want to build from source, run `make compile-go-lib` to build and compile the Go server.
+
+## Usage
+
+To enable the Go online feature retrieval component, set `go_feature_retrieval: True` in your `feature_store.yaml`. This will direct all online feature retrieval to Go instead of Python. This flag will be enabled by default in the future.
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: local
+online_store:
+  type: redis
+  connection_string: "localhost:6379"
+go_feature_retrieval: True
+```
+{% endcode %}
+
+## Future/Current Work
+
+Online feature logging for Data Quality Monitoring is currently in development for the Go feature retrieval component. More information can be found [here](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit#heading=h.9gaqqtox9jg6).
+
+We also plan on adding support for the Java feature server (e.g. the capability to call into the Go component and execute Java UDFs).
+
diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md
new file mode 100644
index 0000000000..352f0edc16
--- /dev/null
+++ b/docs/reference/feature-servers/python-feature-server.md
@@ -0,0 +1,195 @@
+# Python feature server
+
+## Overview
+
+The feature server is an HTTP endpoint that serves features with JSON I/O. This enables users to write and read features from Feast online stores using any programming language that can make HTTP requests.
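+
+Because the server speaks plain JSON over HTTP, requests can be issued from any language. As a hedged sketch, here is the same lookup as the cURL example further down, written with Python's `requests` (the feature and entity names come from the example repository below):
+
+```python
+import json
+
+import requests
+
+# POST the feature references and entity keys to the local feature server.
+response = requests.post(
+    "http://localhost:6566/get-online-features",
+    data=json.dumps(
+        {
+            "features": [
+                "driver_hourly_stats:conv_rate",
+                "driver_hourly_stats:acc_rate",
+                "driver_hourly_stats:avg_daily_trips",
+            ],
+            "entities": {"driver_id": [1001, 1002, 1003]},
+        }
+    ),
+)
+print(response.json())
+```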
+
+## CLI
+
+There is a CLI command that starts the server: `feast serve`. By default, Feast uses port 6566; the port can be overridden with the `--port` flag.
+
+## Deploying as a service
+
+One can also deploy a feature server by building a Docker image that bundles in the project's `feature_store.yaml`. See the [helm chart](https://github.com/feast-dev/feast/blob/master/infra/charts/feast-python-server) for an example.
+
+A [remote feature server](../alpha-aws-lambda-feature-server.md) on AWS Lambda is available. A remote feature server on GCP Cloud Run is currently being developed.
+
+## Example
+
+### Initializing a feature server
+Here's an example of using a local feature server with the local template:
+
+```bash
+$ feast init feature_repo
+Creating a new Feast repository in /home/tsotne/feast/feature_repo.
+
+$ cd feature_repo
+
+$ feast apply
+Registered entity driver_id
+Registered feature view driver_hourly_stats
+Deploying infrastructure for driver_hourly_stats
+
+$ feast materialize-incremental $(date +%Y-%m-%d)
+Materializing 1 feature views to 2021-09-09 17:00:00-07:00 into the sqlite online store.
+
+driver_hourly_stats from 2021-09-09 16:51:08-07:00 to 2021-09-09 17:00:00-07:00:
+100%|████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 295.24it/s]
+
+$ feast serve
+This is an experimental feature. It's intended for early testing and feedback, and could change without warnings in future releases.
+INFO:     Started server process [8889]
+09/10/2021 10:42:11 AM INFO:Started server process [8889]
+INFO:     Waiting for application startup.
+09/10/2021 10:42:11 AM INFO:Waiting for application startup.
+INFO:     Application startup complete.
+09/10/2021 10:42:11 AM INFO:Application startup complete.
+INFO:     Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit)
+09/10/2021 10:42:11 AM INFO:Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit)
+```
+
+### Retrieving features from the online store
+After the server starts, we can execute cURL commands from another terminal tab:
+
+```bash
+$ curl -X POST \
+  "http://localhost:6566/get-online-features" \
+  -d '{
+    "features": [
+      "driver_hourly_stats:conv_rate",
+      "driver_hourly_stats:acc_rate",
+      "driver_hourly_stats:avg_daily_trips"
+    ],
+    "entities": {
+      "driver_id": [1001, 1002, 1003]
+    }
+  }' | jq
+{
+  "metadata": {
+    "feature_names": [
+      "driver_id",
+      "conv_rate",
+      "avg_daily_trips",
+      "acc_rate"
+    ]
+  },
+  "results": [
+    {
+      "values": [
+        1001,
+        0.7037263512611389,
+        308,
+        0.8724706768989563
+      ],
+      "statuses": [
+        "PRESENT",
+        "PRESENT",
+        "PRESENT",
+        "PRESENT"
+      ],
+      "event_timestamps": [
+        "1970-01-01T00:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z"
+      ]
+    },
+    {
+      "values": [
+        1002,
+        0.038169607520103455,
+        332,
+        0.48534533381462097
+      ],
+      "statuses": [
+        "PRESENT",
+        "PRESENT",
+        "PRESENT",
+        "PRESENT"
+      ],
+      "event_timestamps": [
+        "1970-01-01T00:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z"
+      ]
+    },
+    {
+      "values": [
+        1003,
+        0.9665873050689697,
+        779,
+        0.7793770432472229
+      ],
+      "statuses": [
+        "PRESENT",
+        "PRESENT",
+        "PRESENT",
+        "PRESENT"
+      ],
+      "event_timestamps": [
+        "1970-01-01T00:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z",
+        "2021-12-31T23:00:00Z"
+      ]
+    }
+  ]
+}
+```
+
+It's also possible to specify a feature service name instead of the list of features:
+
+```text
+curl -X POST \
+  "http://localhost:6566/get-online-features" \
+  -d '{
+    "feature_service": <feature-service-name>,
+    "entities": {
"driver_id": [1001, 1002, 1003] + } + }' | jq +``` + +### Pushing features to the online store +You can push data corresponding to a push source to the online store (note that timestamps need to be strings): + +```text +curl -X POST "http://localhost:6566/push" -d '{ + "push_source_name": "driver_hourly_stats_push_source", + "df": { + "driver_id": [1001], + "event_timestamp": ["2022-05-13 10:59:42"], + "created": ["2022-05-13 10:59:42"], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000] + } + }' | jq +``` + +or equivalently from Python: +```python +import json +import requests +import pandas as pd +from datetime import datetime + +event_dict = { + "driver_id": [1001], + "event_timestamp": [str(datetime(2021, 5, 13, 10, 59, 42))], + "created": [str(datetime(2021, 5, 13, 10, 59, 42))], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + "string_feature": "test2", +} +push_data = { + "push_source_name":"driver_stats_push_source", + "df":event_dict +} +requests.post( + "http://localhost:6566/push", + data=json.dumps(push_data)) +``` diff --git a/docs/reference/limitations.md b/docs/reference/limitations.md deleted file mode 100644 index efde485a80..0000000000 --- a/docs/reference/limitations.md +++ /dev/null @@ -1,52 +0,0 @@ -# Limitations - -### Feast API - -
-| Limitation | Motivation |
-| :--- | :--- |
-| Feature names and entity names cannot overlap in feature table definitions | Features and entities become columns in historical stores, which may cause conflicts |
-| The following field names are reserved in feature tables: `event_timestamp`, `datetime`, `created_timestamp`, `ingestion_id`, `job_id` | These keywords are used for column names when persisting metadata in historical stores |
- -### Ingestion - -| Limitation | Motivation | -| :--- | :--- | -| Once data has been ingested into Feast, there is currently no way to delete the data without manually going to the database and deleting it. However, during retrieval only the latest rows will be returned for a specific key \(`event_timestamp`, `entity`\) based on its `created_timestamp`. | This functionality simply doesn't exist yet as a Feast API | - -### Storage - -| Limitation | Motivation | -| :--- | :--- | -| Feast does not support offline storage in Feast 0.8 | As part of our re-architecture of Feast, we moved from GCP to cloud-agnostic deployments. Developing offline storage support that is available in all cloud environments is a pending action. | - diff --git a/docs/reference/metrics-reference.md b/docs/reference/metrics-reference.md deleted file mode 100644 index 34c97c7be6..0000000000 --- a/docs/reference/metrics-reference.md +++ /dev/null @@ -1,178 +0,0 @@ -# Metrics Reference - -{% hint style="warning" %} -This page applies to Feast 0.7. The content may be out of date for Feast 0.8+ -{% endhint %} - -Reference of the metrics that each Feast component exports: - -* [Feast Core](metrics-reference.md#feast-core) -* [Feast Serving](metrics-reference.md#feast-serving) -* [Feast Ingestion Job](metrics-reference.md#feast-ingestion-job) - -For how to configure Feast to export Metrics, see the [Metrics user guide.](../advanced/metrics.md) - -## Feast Core - -**Exported Metrics** - -Feast Core exports the following metrics: - -| Metrics | Description | Tags | -| :--- | :--- | :--- | -| `feast_core_request_latency_seconds` | Feast Core's latency in serving Requests in Seconds. | `service`, `method`, `status_code` | -| `feast_core_feature_set_total` | No. of Feature Sets registered with Feast Core. | None | -| `feast_core_store_total` | No. of Stores registered with Feast Core. | None | -| `feast_core_max_memory_bytes` | Max amount of memory the Java virtual machine will attempt to use. | None | -| `feast_core_total_memory_bytes` | Total amount of memory in the Java virtual machine | None | -| `feast_core_free_memory_bytes` | Total amount of free memory in the Java virtual machine. | None | -| `feast_core_gc_collection_seconds` | Time spent in a given JVM garbage collector in seconds. | None | - -**Metric Tags** - -Exported Feast Core metrics may be filtered by the following tags/keys - -| Tag | Description | -| :--- | :--- | -| `service` | Name of the Service that request is made to. Should be set to `CoreService` | -| `method` | Name of the Method that the request is calling. \(ie `ListFeatureSets`\) | -| `status_code` | Status code returned as a result of handling the requests \(ie `OK`\). Can be used to find request failures. | - -## Feast Serving - -**Exported Metrics** - -Feast Serving exports the following metrics: - -| Metric | Description | Tags | -| :--- | :--- | :--- | -| `feast_serving_request_latency_seconds` | Feast Serving's latency in serving Requests in Seconds. | `method` | -| `feast_serving_request_feature_count` | No. of requests retrieving a Feature from Feast Serving. | `project`, `feature_name` | -| `feast_serving_not_found_feature_count` | No. of requests retrieving a Feature has resulted in a [`NOT_FOUND` field status.](../user-guide/getting-training-features.md#online-field-statuses) | `project`, `feature_name` | -| `feast_serving_stale_feature_count` | No. 
of requests retrieving a Feature resulted in a [`OUTSIDE_MAX_AGE` field status.](../user-guide/getting-training-features.md#online-field-statuses) | `project`, `feature_name` | -| `feast_serving_grpc_request_count` | Total gRPC requests served. | `method` | - -**Metric Tags** - -Exported Feast Serving metrics may be filtered by the following tags/keys - -| Tag | Description | -| :--- | :--- | -| `method` | Name of the Method that the request is calling. \(ie `ListFeatureSets`\) | -| `status_code` | Status code returned as a result of handling the requests \(ie `OK`\). Can be used to find request failures. | -| `project` | Name of the project that the FeatureSet of the Feature retrieved belongs to. | -| `feature_name` | Name of the Feature being retrieved. | - -## Feast Ingestion Job - -Feast Ingestion computes both metrics an statistics on [data ingestion.](../user-guide/define-and-ingest-features.md) Make sure you familar with data ingestion concepts before proceeding. - -**Metrics Namespace** - -Metrics are computed at two stages of the Feature Row's/Feature Value's life cycle when being processed by the Ingestion Job: - -* `Inflight`- Prior to writing data to stores, but after successful validation of data. -* `WriteToStoreSucess`- After a successful store write. - -Metrics processed by each staged will be tagged with `metrics_namespace` to the stage where the metric was computed. - -**Metrics Bucketing** - -Metrics with a `{BUCKET}` are computed on a 60 second window/bucket. Suffix with the following to select the bucket to use: - -* `min` - minimum value. -* `max` - maximum value. -* `mean`- mean value. -* `percentile_90`- 90 percentile. -* `percentile_95`- 95 percentile. -* `percentile_99`- 99 percentile. - -**Exported Metrics** - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Metric | Description | Tags |
-| :--- | :--- | :--- |
-| `feast_ingestion_feature_row_lag_ms_{BUCKET}` | Lag time in milliseconds between succeeding ingested Feature Rows. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_lag_ms_{BUCKET}` | Lag time in milliseconds between succeeding ingested values for each Feature. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `feast_feature_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_{BUCKET}` | Last ingested value for each Feature. | `feast_store`, `feast_project_name`, `feast_feature_name`, `feast_featureSet_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_row_ingested_count` | No. of ingested Feature Rows. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_feature_value_missing_count` | No. of times an ingested Feature Row did not provide a value for a Feature. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `feast_feature_name`, `ingestion_job_name`, `metrics_namespace` |
-| `feast_ingestion_deadletter_row_count` | No. of Feature Rows that the Ingestion Job did not successfully write to store. | `feast_store`, `feast_project_name`, `feast_featureSet_name`, `ingestion_job_name` |
- -**Metric Tags** - -Exported Feast Ingestion Job metrics may be filtered by the following tags/keys - -| Tag | Description | -| :--- | :--- | -| `feast_store` | Name of the target store the Ingestion Job is writing to. | -| `feast_project_name` | Name of the project that the ingested FeatureSet belongs to. | -| `feast_featureSet_name` | Name of the Feature Set being ingested. | -| `feast_feature_name` | Name of the Feature being ingested. | -| `ingestion_job_name` | Name of the Ingestion Job performing data ingestion. Typically this is set to the Id of the Ingestion Job. | -| `metrics_namespace` | Stage where metrics where computed. Either `Inflight` or `WriteToStoreSuccess` | - diff --git a/docs/reference/offline-stores/README.md b/docs/reference/offline-stores/README.md index e2f4e1b8fd..b3c85470b9 100644 --- a/docs/reference/offline-stores/README.md +++ b/docs/reference/offline-stores/README.md @@ -1,8 +1,13 @@ # Offline stores -Please see [Offline Store](../../concepts/offline-store.md) for an explanation of offline stores. +Please see [Offline Store](../../getting-started/architecture-and-components/offline-store.md) for an explanation of offline stores. {% page-ref page="file.md" %} -{% page-ref page="untitled.md" %} +{% page-ref page="snowflake.md" %} +{% page-ref page="bigquery.md" %} + +{% page-ref page="redshift.md" %} + +{% page-ref page="spark.md" %} diff --git a/docs/reference/offline-stores/untitled.md b/docs/reference/offline-stores/bigquery.md similarity index 92% rename from docs/reference/offline-stores/untitled.md rename to docs/reference/offline-stores/bigquery.md index 8ffa566a70..255c587d6b 100644 --- a/docs/reference/offline-stores/untitled.md +++ b/docs/reference/offline-stores/bigquery.md @@ -1,6 +1,6 @@ # BigQuery -### Description +## Description The BigQuery offline store provides support for reading [BigQuerySources](../data-sources/bigquery.md). @@ -9,9 +9,9 @@ The BigQuery offline store provides support for reading [BigQuerySources](../dat * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. Pandas dataframes will be uploaded to BigQuery in order to complete join operations. * A [BigQueryRetrievalJob](https://github.com/feast-dev/feast/blob/c50a36ec1ad5b8d81c6f773c23204db7c7a7d218/sdk/python/feast/infra/offline_stores/bigquery.py#L210) is returned when calling `get_historical_features()`. -### Example +## Example -{% code title="feature\_store.yaml" %} +{% code title="feature_store.yaml" %} ```yaml project: my_feature_repo registry: gs://my-bucket/data/registry.db @@ -23,4 +23,3 @@ offline_store: {% endcode %} Configuration options are available [here](https://rtd.feast.dev/en/latest/#feast.repo_config.BigQueryOfflineStoreConfig). - diff --git a/docs/reference/offline-stores/file.md b/docs/reference/offline-stores/file.md index b4ce3b1b66..42ac821691 100644 --- a/docs/reference/offline-stores/file.md +++ b/docs/reference/offline-stores/file.md @@ -1,15 +1,15 @@ # File -### Description +## Description The File offline store provides support for reading [FileSources](../data-sources/file.md). * Only Parquet files are currently supported. * All data is downloaded and joined using Python and may not scale to production workloads. 
-### Example
-
-{% code title="feature\_store.yaml" %}
+## Example
+
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
@@ -20,4 +20,3 @@ offline_store:
 {% endcode %}
 
 Configuration options are available [here](https://rtd.feast.dev/en/latest/#feast.repo_config.FileOfflineStoreConfig).
-
diff --git a/docs/reference/offline-stores/redshift.md b/docs/reference/offline-stores/redshift.md
new file mode 100644
index 0000000000..73148730c5
--- /dev/null
+++ b/docs/reference/offline-stores/redshift.md
@@ -0,0 +1,124 @@
+# Redshift
+
+## Description
+
+The Redshift offline store provides support for reading [RedshiftSources](../data-sources/redshift.md).
+
+* Redshift tables and views are allowed as sources.
+* All joins happen within Redshift.
+* Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. Pandas dataframes will be uploaded to Redshift in order to complete join operations.
+* A [RedshiftRetrievalJob](https://github.com/feast-dev/feast/blob/bf557bcb72c7878a16dccb48443bbbe9dc3efa49/sdk/python/feast/infra/offline_stores/redshift.py#L161) is returned when calling `get_historical_features()`.
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: aws
+offline_store:
+  type: redshift
+  region: us-west-2
+  cluster_id: feast-cluster
+  database: feast-database
+  user: redshift-user
+  s3_staging_location: s3://feast-bucket/redshift
+  iam_role: arn:aws:iam::123456789012:role/redshift_s3_access_role
+```
+{% endcode %}
+
+Configuration options are available [here](https://github.com/feast-dev/feast/blob/bf557bcb72c7878a16dccb48443bbbe9dc3efa49/sdk/python/feast/infra/offline_stores/redshift.py#L22).
+
+## Permissions
+
+Feast requires the following permissions in order to execute commands for the Redshift offline store:
+
+| **Command** | Permissions | Resources |
+| :--- | :--- | :--- |
+| **Apply** | redshift-data:DescribeTable<br>redshift:GetClusterCredentials | arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:dbuser:&lt;redshift_cluster_id&gt;/&lt;redshift_username&gt;<br>arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:dbname:&lt;redshift_cluster_id&gt;/&lt;redshift_database_name&gt;<br>arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:cluster:&lt;redshift_cluster_id&gt; |
+| **Materialize** | redshift-data:ExecuteStatement | arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:cluster:&lt;redshift_cluster_id&gt; |
+| **Materialize** | redshift-data:DescribeStatement | \* |
+| **Materialize** | s3:ListBucket<br>s3:GetObject<br>s3:DeleteObject | arn:aws:s3:::&lt;bucket_name&gt;<br>arn:aws:s3:::&lt;bucket_name&gt;/\* |
+| **Get Historical Features** | redshift-data:ExecuteStatement<br>redshift:GetClusterCredentials | arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:dbuser:&lt;redshift_cluster_id&gt;/&lt;redshift_username&gt;<br>arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:dbname:&lt;redshift_cluster_id&gt;/&lt;redshift_database_name&gt;<br>arn:aws:redshift:&lt;region&gt;:&lt;account_id&gt;:cluster:&lt;redshift_cluster_id&gt; |
+| **Get Historical Features** | redshift-data:DescribeStatement | \* |
+| **Get Historical Features** | s3:ListBucket<br>s3:GetObject<br>s3:PutObject<br>s3:DeleteObject | arn:aws:s3:::&lt;bucket_name&gt;<br>arn:aws:s3:::&lt;bucket_name&gt;/\* |
+
+The following inline policy can be used to grant Feast the necessary permissions:
+
+```javascript
+{
+    "Statement": [
+        {
+            "Action": [
+                "s3:ListBucket",
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+            ],
+            "Effect": "Allow",
+            "Resource": [
+                "arn:aws:s3:::<bucket_name>/*",
+                "arn:aws:s3:::<bucket_name>"
+            ]
+        },
+        {
+            "Action": [
+                "redshift-data:DescribeTable",
+                "redshift:GetClusterCredentials",
+                "redshift-data:ExecuteStatement"
+            ],
+            "Effect": "Allow",
+            "Resource": [
+                "arn:aws:redshift:<region>:<account_id>:dbuser:<redshift_cluster_id>/<redshift_username>",
+                "arn:aws:redshift:<region>:<account_id>:dbname:<redshift_cluster_id>/<redshift_database_name>",
+                "arn:aws:redshift:<region>:<account_id>:cluster:<redshift_cluster_id>"
+            ]
+        },
+        {
+            "Action": [
+                "redshift-data:DescribeStatement"
+            ],
+            "Effect": "Allow",
+            "Resource": "*"
+        }
+    ],
+    "Version": "2012-10-17"
+}
+```
+
+In addition to this, the Redshift offline store requires an IAM role that will be used by Redshift itself to interact with S3. More concretely, Redshift has to use this IAM role to run [UNLOAD](https://docs.aws.amazon.com/redshift/latest/dg/r_UNLOAD.html) and [COPY](https://docs.aws.amazon.com/redshift/latest/dg/r_COPY.html) commands. Once created, this IAM role needs to be configured in the `feature_store.yaml` file as `offline_store: iam_role`.
+
+The following inline policy can be used to grant Redshift the necessary permissions to access S3:
+
+```javascript
+{
+    "Statement": [
+        {
+            "Action": "s3:*",
+            "Effect": "Allow",
+            "Resource": [
+                "arn:aws:s3:::feast-integration-tests",
+                "arn:aws:s3:::feast-integration-tests/*"
+            ]
+        }
+    ],
+    "Version": "2012-10-17"
+}
+```
+
+Finally, the following trust relationship is necessary to make sure that Redshift, and only Redshift, can assume this role:
+
+```javascript
+{
+    "Version": "2012-10-17",
+    "Statement": [
+        {
+            "Effect": "Allow",
+            "Principal": {
+                "Service": "redshift.amazonaws.com"
+            },
+            "Action": "sts:AssumeRole"
+        }
+    ]
+}
+```
diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md
new file mode 100644
index 0000000000..aa006b43bb
--- /dev/null
+++ b/docs/reference/offline-stores/snowflake.md
@@ -0,0 +1,34 @@
+# Snowflake
+
+## Description
+
+The Snowflake offline store provides support for reading [SnowflakeSources](../data-sources/snowflake.md).
+
+* Snowflake tables and views are allowed as sources.
+* All joins happen within Snowflake.
+* Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. Pandas dataframes will be uploaded to Snowflake in order to complete join operations.
+* A `SnowflakeRetrievalJob` is returned when calling `get_historical_features()`.
+    * This allows you to call
+        * `to_snowflake` to save the dataset into Snowflake,
+        * `to_sql` to get the SQL query that would execute on `to_df`, and
+        * `to_arrow_chunks` to get the result in batches ([Snowflake python connector docs](https://docs.snowflake.com/en/user-guide/python-connector-api.html#get_result_batches)).
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: local
+offline_store:
+  type: snowflake.offline
+  account: snowflake_deployment.us-east-1
+  user: user_login
+  password: user_password
+  role: sysadmin
+  warehouse: demo_wh
+  database: FEAST
+```
+{% endcode %}
+
+Configuration options are available in [SnowflakeOfflineStoreConfig](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/offline_stores/snowflake.py#L56).
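+
+For orientation, a hedged sketch of how these `SnowflakeRetrievalJob` methods might be chained; the entity dataframe and feature reference are assumptions, and exact method signatures may differ between Feast versions:
+
+```python
+import pandas as pd
+from feast import FeatureStore
+
+store = FeatureStore(repo_path=".")
+
+# Hypothetical entity dataframe: one driver and an event timestamp.
+entity_df = pd.DataFrame(
+    {"driver_id": [1001], "event_timestamp": [pd.Timestamp.utcnow()]}
+)
+
+# With the Snowflake offline store, this returns a SnowflakeRetrievalJob.
+job = store.get_historical_features(
+    entity_df=entity_df,
+    features=["driver_hourly_stats:conv_rate"],  # assumed feature reference
+)
+
+print(job.to_sql())                    # inspect the SQL that to_df would run
+job.to_snowflake("MY_TRAINING_TABLE")  # persist the dataset in Snowflake
+for batch in job.to_arrow_chunks():    # iterate the result in batches
+    print(batch)
+```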
diff --git a/docs/reference/offline-stores/spark.md b/docs/reference/offline-stores/spark.md
new file mode 100644
index 0000000000..48ddf46d17
--- /dev/null
+++ b/docs/reference/offline-stores/spark.md
@@ -0,0 +1,38 @@
+# Spark
+
+## Description
+
+The Spark offline store, currently in alpha development, provides support for reading [SparkSources](../data-sources/spark.md).
+
+## Disclaimer
+
+This Spark offline store does not yet achieve full test coverage and continues to fail some integration tests when integrating with the Feast universal test suite. Please do NOT assume complete stability of the API.
+
+* Spark tables and views are allowed as sources that are loaded in from some Spark store (e.g. in Hive or in memory).
+* Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. Pandas dataframes will be converted to a Spark dataframe and processed as a temporary view.
+* A `SparkRetrievalJob` is returned when calling `get_historical_features()`.
+    * This allows you to call
+        * `to_df` to retrieve the dataframe as a Pandas dataframe,
+        * `to_arrow` to retrieve the dataframe as a PyArrow Table, or
+        * `to_spark_df` to retrieve the dataframe as a Spark dataframe.
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_project
+registry: data/registry.db
+provider: local
+offline_store:
+  type: spark
+  spark_conf:
+    spark.master: "local[*]"
+    spark.ui.enabled: "false"
+    spark.eventLog.enabled: "false"
+    spark.sql.catalogImplementation: "hive"
+    spark.sql.parser.quotedRegexColumnNames: "true"
+    spark.sql.session.timeZone: "UTC"
+online_store:
+  path: data/online_store.db
+```
+{% endcode %}
diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md
index 0a240ffb10..2c2902bc57 100644
--- a/docs/reference/online-stores/README.md
+++ b/docs/reference/online-stores/README.md
@@ -1,6 +1,6 @@
 # Online stores
 
-Please see [Online Store](../../concepts/online-store.md) for an explanation of online stores.
+Please see [Online Store](../../getting-started/architecture-and-components/online-store.md) for an explanation of online stores.
 
 {% page-ref page="sqlite.md" %}
 
@@ -8,3 +8,4 @@ Please see [Online Store](../../concepts/online-store.md) for an explanation of
 {% page-ref page="datastore.md" %}
 
+{% page-ref page="dynamodb.md" %}
diff --git a/docs/reference/online-stores/datastore.md b/docs/reference/online-stores/datastore.md
index 730fab5969..012d497f30 100644
--- a/docs/reference/online-stores/datastore.md
+++ b/docs/reference/online-stores/datastore.md
@@ -1,12 +1,12 @@
 # Datastore
 
-### Description
+## Description
 
-The [Datastore](https://cloud.google.com/datastore) online store provides support for materializing feature values into Cloud Datastore. The data model used to store feature values in Datastore is described in more detail [here](https://github.com/feast-dev/feast/blob/master/docs/specs/online_store_format.md#google-datastore-online-store-format).
+The [Datastore](https://cloud.google.com/datastore) online store provides support for materializing feature values into Cloud Datastore. The data model used to store feature values in Datastore is described in more detail [here](../../specs/online_store_format.md#google-datastore-online-store-format).
-### Example
+## Example
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
@@ -19,4 +19,3 @@ online_store:
 {% endcode %}
 
 Configuration options are available [here](https://rtd.feast.dev/en/latest/#feast.repo_config.DatastoreOnlineStoreConfig).
-
diff --git a/docs/reference/online-stores/dynamodb.md b/docs/reference/online-stores/dynamodb.md
new file mode 100644
index 0000000000..2af7e422d6
--- /dev/null
+++ b/docs/reference/online-stores/dynamodb.md
@@ -0,0 +1,55 @@
+# DynamoDB
+
+## Description
+
+The [DynamoDB](https://aws.amazon.com/dynamodb/) online store provides support for materializing feature values into AWS DynamoDB.
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: aws
+online_store:
+  type: dynamodb
+  region: us-west-2
+```
+{% endcode %}
+
+Configuration options are available [here](https://github.com/feast-dev/feast/blob/17bfa6118d6658d2bff53d7de8e2ccef5681714d/sdk/python/feast/infra/online_stores/dynamodb.py#L36).
+
+## Permissions
+
+Feast requires the following permissions in order to execute commands for the DynamoDB online store:
+
+| **Command**             | Permissions                                                         | Resources                                           |
+| ----------------------- | ------------------------------------------------------------------- | --------------------------------------------------- |
+| **Apply**               | dynamodb:CreateTable, dynamodb:DescribeTable, dynamodb:DeleteTable  | arn:aws:dynamodb:\<region>:\<account\_id>:table/\*  |
+| **Materialize**         | dynamodb:BatchWriteItem                                              | arn:aws:dynamodb:\<region>:\<account\_id>:table/\*  |
+| **Get Online Features** | dynamodb:BatchGetItem                                                | arn:aws:dynamodb:\<region>:\<account\_id>:table/\*  |
+
+The following inline policy can be used to grant Feast the necessary permissions:
+
+```javascript
+{
+  "Statement": [
+    {
+      "Action": [
+        "dynamodb:CreateTable",
+        "dynamodb:DescribeTable",
+        "dynamodb:DeleteTable",
+        "dynamodb:BatchWriteItem",
+        "dynamodb:BatchGetItem"
+      ],
+      "Effect": "Allow",
+      "Resource": [
+        "arn:aws:dynamodb:<region>:<account_id>:table/*"
+      ]
+    }
+  ],
+  "Version": "2012-10-17"
+}
+```
diff --git a/docs/reference/online-stores/redis.md b/docs/reference/online-stores/redis.md
index adcff9a8ea..ce1de2ad54 100644
--- a/docs/reference/online-stores/redis.md
+++ b/docs/reference/online-stores/redis.md
@@ -1,17 +1,17 @@
 # Redis
 
-### Description
+## Description
 
-The [Redis](https://redis.io/) online store provides support for materializing feature values into Redis.
+The [Redis](https://redis.io) online store provides support for materializing feature values into Redis.
 
 * Both Redis and Redis Cluster are supported
-* The data model used to store feature values in Redis is described in more detail [here](https://github.com/feast-dev/feast/blob/master/docs/specs/online_store_format.md).
+* The data model used to store feature values in Redis is described in more detail [here](../../specs/online\_store\_format.md).
 
-### Examples
+## Examples
 
 Connecting to a single Redis instance
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
@@ -24,7 +24,7 @@ online_store:
 
 Connecting to a Redis Cluster with SSL enabled and password authentication
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
@@ -36,5 +36,4 @@ online_store:
 ```
 {% endcode %}
 
-Configuration options are available [here](https://rtd.feast.dev/en/master/#feast.repo_config.RedisOnlineStoreConfig).
-
+Configuration options are available [here](https://rtd.feast.dev/en/master/#feast.infra.online\_stores.redis.RedisOnlineStoreConfig).
diff --git a/docs/reference/online-stores/sqlite.md b/docs/reference/online-stores/sqlite.md
index 2191b0cc5d..fd11e3439c 100644
--- a/docs/reference/online-stores/sqlite.md
+++ b/docs/reference/online-stores/sqlite.md
@@ -1,15 +1,15 @@
 # SQLite
 
-### Description
+## Description
 
 The [SQLite](https://www.sqlite.org/index.html) online store provides support for materializing feature values into an SQLite database for serving online features.
 
 * All feature values are stored in an on-disk SQLite database
 * Only the latest feature values are persisted
 
-### Example
+## Example
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
@@ -21,4 +21,3 @@ online_store:
 {% endcode %}
 
 Configuration options are available [here](https://rtd.feast.dev/en/latest/#feast.repo_config.SqliteOnlineStoreConfig).
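Whichever of the online stores documented above backs the repo (DynamoDB, Redis, or SQLite), the serving path is the same. A minimal hedged sketch; the feature view `driver_hourly_stats` and entity key `driver_id` are hypothetical names, not taken from these pages:

```python
from datetime import datetime
from feast import FeatureStore

store = FeatureStore(repo_path=".")

# Materialize writes the latest feature values into the configured online store
# (for DynamoDB this is the step that needs dynamodb:BatchWriteItem).
store.materialize_incremental(end_date=datetime.utcnow())

# Get Online Features reads them back at serving time
# (for DynamoDB this is the step that needs dynamodb:BatchGetItem).
features = store.get_online_features(
    features=["driver_hourly_stats:conv_rate"],  # hypothetical feature reference
    entity_rows=[{"driver_id": 1001}],           # hypothetical entity key
).to_dict()
print(features)
```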
-
diff --git a/docs/reference/providers/README.md b/docs/reference/providers/README.md
index ffc1c9805f..dc52d92726 100644
--- a/docs/reference/providers/README.md
+++ b/docs/reference/providers/README.md
@@ -1,8 +1,9 @@
 # Providers
 
-Please see [Provider](../../concepts/provider.md) for an explanation of providers.
+Please see [Provider](../../getting-started/architecture-and-components/provider.md) for an explanation of providers.
 
 {% page-ref page="local.md" %}
 
 {% page-ref page="google-cloud-platform.md" %}
 
+{% page-ref page="amazon-web-services.md" %}
diff --git a/docs/reference/providers/amazon-web-services.md b/docs/reference/providers/amazon-web-services.md
new file mode 100644
index 0000000000..3135fedb74
--- /dev/null
+++ b/docs/reference/providers/amazon-web-services.md
@@ -0,0 +1,27 @@
+# Amazon Web Services
+
+## Description
+
+* Offline Store: Uses the **Redshift** offline store by default. Also supports File as the offline store.
+* Online Store: Uses the **DynamoDB** online store by default. Also supports Sqlite as an online store.
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: aws
+online_store:
+  type: dynamodb
+  region: us-west-2
+offline_store:
+  type: redshift
+  region: us-west-2
+  cluster_id: feast-cluster
+  database: feast-database
+  user: redshift-user
+  s3_staging_location: s3://feast-bucket/redshift
+  iam_role: arn:aws:iam::123456789012:role/redshift_s3_access_role
+```
+{% endcode %}
diff --git a/docs/reference/providers/google-cloud-platform.md b/docs/reference/providers/google-cloud-platform.md
index af6444187d..713313d16b 100644
--- a/docs/reference/providers/google-cloud-platform.md
+++ b/docs/reference/providers/google-cloud-platform.md
@@ -1,102 +1,27 @@
 # Google Cloud Platform
 
-### Description
+## Description
 
 * Offline Store: Uses the **BigQuery** offline store by default. Also supports File as the offline store.
 * Online Store: Uses the **Datastore** online store by default. Also supports Sqlite as an online store.
 
-### Example
+## Example
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: gs://my-bucket/data/registry.db
-provider: gcp 
+provider: gcp
 ```
 {% endcode %}
 
-### **Permissions**
-
-[removed: an HTML-formatted permissions table with the same Command / Component / Permissions / Recommended Role rows as the markdown table added below]
+## **Permissions**
+
+| **Command**                 | Component               | Permissions                                                                                                                                                                                | Recommended Role          |
+| --------------------------- | ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------- |
+| **Apply**                   | BigQuery (source)       | bigquery.jobs.create, bigquery.readsessions.create, bigquery.readsessions.getData                                                                                                           | roles/bigquery.user       |
+| **Apply**                   | Datastore (destination) | datastore.entities.allocateIds, datastore.entities.create, datastore.entities.delete, datastore.entities.get, datastore.entities.list, datastore.entities.update                            | roles/datastore.owner     |
+| **Materialize**             | BigQuery (source)       | bigquery.jobs.create                                                                                                                                                                        | roles/bigquery.user       |
+| **Materialize**             | Datastore (destination) | datastore.entities.allocateIds, datastore.entities.create, datastore.entities.delete, datastore.entities.get, datastore.entities.list, datastore.entities.update, datastore.databases.get   | roles/datastore.owner     |
+| **Get Online Features**     | Datastore               | datastore.entities.get                                                                                                                                                                      | roles/datastore.user      |
+| **Get Historical Features** | BigQuery (source)       | bigquery.datasets.get, bigquery.tables.get, bigquery.tables.create, bigquery.tables.updateData, bigquery.tables.update, bigquery.tables.delete, bigquery.tables.getData                     | roles/bigquery.dataEditor |
diff --git a/docs/reference/providers/local.md b/docs/reference/providers/local.md
index 52fb18f6fa..a93a3b8b2d 100644
--- a/docs/reference/providers/local.md
+++ b/docs/reference/providers/local.md
@@ -7,11 +7,10 @@
 
 ## Example
 
-{% code title="feature\_store.yaml" %}
+{% code title="feature_store.yaml" %}
 ```yaml
 project: my_feature_repo
 registry: data/registry.db
 provider: local
 ```
 {% endcode %}
-
diff --git a/docs/reference/repository-config.md b/docs/reference/repository-config.md
deleted file mode 100644
index 128d773071..0000000000
--- a/docs/reference/repository-config.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Repository Config
-
diff --git a/docs/reference/telemetry.md b/docs/reference/telemetry.md
deleted file mode 100644
index f8f7678764..0000000000
--- a/docs/reference/telemetry.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Telemetry
-
-### How telemetry is used
-
-The Feast project logs anonymous usage statistics and errors in order to inform our planning. Several client methods are tracked, beginning in Feast 0.9. Users are assigned a UUID which is sent along with the name of the method, the Feast version, the OS \(using `sys.platform`\), and the current time.
-
-The [source code](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/telemetry.py) is available here.
-
-### How to disable telemetry
-
-Set the environment variable `FEAST_TELEMETRY` to `False`.
-
diff --git a/docs/repository-config.md b/docs/repository-config.md
deleted file mode 100644
index 128d773071..0000000000
--- a/docs/repository-config.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Repository Config
-
diff --git a/docs/roadmap.md b/docs/roadmap.md
index a11a9b82ce..3eb181c0da 100644
--- a/docs/roadmap.md
+++ b/docs/roadmap.md
@@ -1,136 +1,77 @@
 # Roadmap
 
-## Backlog
-
-* Add On-demand transformations support
-* Add Data quality monitoring
-* Add Snowflake offline store support
-* Add Bigtable support
-* Add Push/Ingestion API support
-
-## Scheduled for development \(next 3 months\)
-
-[Roadmap discussion](https://github.com/feast-dev/feast/issues/1527)
-
-* Ensure Feast Serving is compatible with the new Feast
-  * Decouple Feast Serving from Feast Core
-  * Add FeatureView support to Feast Serving
-  * Update Helm Charts \(remove Core, Postgres, Job Service, Spark\)
-* Add Redis support for Feast
-* Add direct deployment support to AWS and GCP
-* Add Dynamo support
-* Add Redshift support
-
-## Feast 0.10
-
-### **New Functionality**
-
-1. Full local mode support \(Sqlite and Parquet\)
-2. Provider model for added extensibility
-3. Firestore support
-4. Native \(No-Spark\) BigQuery support
-5. Added support for object store based registry
-6. Add support for FeatureViews
-7. Added support for infrastructure configuration through `apply`
-
-### **Technical debt, refactoring, or housekeeping**
-
-1. Remove dependency on Feast Core
-2. Feast Serving made optional
-3. Moved Python API documentation to Read The Docs
-4. Moved Feast Java components to [feast-java](https://github.com/feast-dev/feast-java)
-5. 
Moved Feast Spark components to [feast-spark](https://github.com/feast-dev/feast-spark) - -## Feast 0.9 - -[Discussion](https://github.com/feast-dev/feast/issues/1131) - -### New Functionality - -* Added Feast Job Service for management of ingestion and retrieval jobs -* Added support for [Spark on K8s Operator](https://github.com/GoogleCloudPlatform/spark-on-k8s-operator) as Spark job launcher -* Added Azure deployment and storage support \([\#1241](https://github.com/feast-dev/feast/pull/1241)\) - -Note: Please see discussion thread above for functionality that did not make this release. - -## Feast 0.8 - -[Discussion](https://github.com/feast-dev/feast/issues/1018) - -[Feast 0.8 RFC](https://docs.google.com/document/d/1snRxVb8ipWZjCiLlfkR4Oc28p7Fkv_UXjvxBFWjRBj4/edit#heading=h.yvkhw2cuvx5) - -### **New Functionality** - -1. Add support for AWS \(data sources and deployment\) -2. Add support for local deployment -3. Add support for Spark based ingestion -4. Add support for Spark based historical retrieval - -### **Technical debt, refactoring, or housekeeping** - -1. Move job management functionality to SDK -2. Remove Apache Beam based ingestion -3. Allow direct ingestion from batch sources that does not pass through stream -4. Remove Feast Historical Serving abstraction to allow direct access from Feast SDK to data sources for retrieval - -## Feast 0.7 - -[Discussion](https://github.com/feast-dev/feast/issues/834) - -[GitHub Milestone](https://github.com/feast-dev/feast/milestone/4) - -### **New Functionality** - -1. Label based Ingestion Job selector for Job Controller [\#903](https://github.com/feast-dev/feast/pull/903) -2. Authentication Support for Java & Go SDKs [\#971](https://github.com/feast-dev/feast/pull/971) -3. Automatically Restart Ingestion Jobs on Upgrade [\#949](https://github.com/feast-dev/feast/pull/949) -4. Structured Audit Logging [\#891](https://github.com/feast-dev/feast/pull/891) -5. Request Response Logging support via Fluentd [\#961](https://github.com/feast-dev/feast/pull/961) -6. Feast Core Rest Endpoints [\#878](https://github.com/feast-dev/feast/pull/878) - -### **Technical debt, refactoring, or housekeeping** - -1. Improved integration testing framework [\#886](https://github.com/feast-dev/feast/pull/886) -2. Rectify all flaky batch tests [\#953](https://github.com/feast-dev/feast/pull/953), [\#982](https://github.com/feast-dev/feast/pull/982) -3. Decouple job management from Feast Core [\#951](https://github.com/feast-dev/feast/pull/951) - -## Feast 0.6 - -[Discussion](https://github.com/feast-dev/feast/issues/767) - -[GitHub Milestone](https://github.com/feast-dev/feast/milestone/3) - -### New functionality - -1. Batch statistics and validation [\#612](https://github.com/feast-dev/feast/pull/612) -2. Authentication and authorization [\#554](https://github.com/feast-dev/feast/pull/554) -3. Online feature and entity status metadata [\#658](https://github.com/feast-dev/feast/pull/658) -4. Improved searching and filtering of features and entities -5. Python support for labels [\#663](https://github.com/feast-dev/feast/issues/663) - -### Technical debt, refactoring, or housekeeping - -1. Improved job life cycle management [\#761](https://github.com/feast-dev/feast/issues/761) -2. Compute and write metrics for rows prior to store writes [\#763](https://github.com/feast-dev/feast/pull/763) - -## Feast 0.5 - -[Discussion](https://github.com/feast-dev/feast/issues/527) - -### New functionality - -1. 
Streaming statistics and validation \(M1 from [Feature Validation RFC](https://docs.google.com/document/d/1TPmd7r4mniL9Y-V_glZaWNo5LMXLshEAUpYsohojZ-8/edit)\)
-2. Support for Redis Clusters \([\#478](https://github.com/feast-dev/feast/issues/478), [\#502](https://github.com/feast-dev/feast/issues/502)\)
-3. Add feature and feature set labels, i.e. key/value registry metadata \([\#463](https://github.com/feast-dev/feast/issues/463)\)
-4. Job management API \([\#302](https://github.com/feast-dev/feast/issues/302)\)
-
-### Technical debt, refactoring, or housekeeping
-
-1. Clean up and document all configuration options \([\#525](https://github.com/feast-dev/feast/issues/525)\)
-2. Externalize storage interfaces \([\#402](https://github.com/feast-dev/feast/issues/402)\)
-3. Reduce memory usage in Redis \([\#515](https://github.com/feast-dev/feast/issues/515)\)
-4. Support for handling out of order ingestion \([\#273](https://github.com/feast-dev/feast/issues/273)\)
-5. Remove feature versions and enable automatic data migration \([\#386](https://github.com/feast-dev/feast/issues/386)\) \([\#462](https://github.com/feast-dev/feast/issues/462)\)
-6. Tracking of batch ingestion with dataset\_id/job\_id \([\#461](https://github.com/feast-dev/feast/issues/461)\)
-7. Write Beam metrics after ingestion to store \(not prior\) \([\#489](https://github.com/feast-dev/feast/issues/489)\)
-
+The list below contains the functionality that contributors are planning to develop for Feast.
+
+* Items below that are in development (or planned for development) will be indicated in parentheses.
+* We welcome contributions to all items in the roadmap!
+* Want to influence our roadmap and prioritization? Submit your feedback to [this form](https://docs.google.com/forms/d/e/1FAIpQLSfa1nRQ0sKz-JEFnMMCi4Jseag\_yDssO\_3nV9qMfxfrkil-wA/viewform).
+* Want to speak to a Feast contributor? We are more than happy to jump on a call. Please schedule a time using [Calendly](https://calendly.com/d/x2ry-g5bb/meet-with-feast-team).
+ +* **Data Sources** + * [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake) + * [x] [Redshift source](https://docs.feast.dev/reference/data-sources/redshift) + * [x] [BigQuery source](https://docs.feast.dev/reference/data-sources/bigquery) + * [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) + * [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) + * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) + * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) + * [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark) + * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) + * [ ] HTTP source +* **Offline Stores** + * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) + * [x] [Redshift](https://docs.feast.dev/reference/offline-stores/redshift) + * [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) + * [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) + * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) + * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) + * [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino) + * [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark) + * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) + * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) +* **Online Stores** + * [x] [DynamoDB](https://docs.feast.dev/reference/online-stores/dynamodb) + * [x] [Redis](https://docs.feast.dev/reference/online-stores/redis) + * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) + * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) + * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) + * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) + * [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) + * [ ] Bigtable (in progress) + * [ ] Cassandra +* **Streaming** + * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/creating-a-custom-provider) + * [x] [Push based streaming data ingestion](https://docs.feast.dev/reference/data-sources/push.md) + * [ ] Streaming ingestion on AWS + * [ ] Streaming ingestion on GCP +* **Feature Engineering** + * [x] On-demand Transformations (Alpha release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) + * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Streaming transformation +* **Deployments** + * [x] AWS Lambda (Alpha release. See [RFC](https://docs.google.com/document/d/1eZWKWzfBif66LDN32IajpaG-j82LSHCCOzY6R7Ax7MI/edit)) + * [x] Kubernetes (See [guide](https://docs.feast.dev/how-to-guides/running-feast-in-production#4.3.-java-based-feature-server-deployed-on-kubernetes)) + * [ ] Cloud Run + * [ ] KNative +* **Feature Serving** + * [x] Python Client + * [x] REST Feature Server (Python) (Alpha release. 
See [RFC](https://docs.google.com/document/d/1iXvFhAsJ5jgAhPOpTdB3j-Wj1S9x3Ev\_Wr6ZpnLzER4/edit)) + * [x] gRPC Feature Server (Java) (See [#1497](https://github.com/feast-dev/feast/issues/1497)) + * [x] Push API + * [ ] Java Client + * [ ] Go Client + * [ ] Delete API + * [ ] Feature Logging (for training) +* **Data Quality Management (See [RFC](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98/edit))** + * [x] Data profiling and validation (Great Expectations) + * [ ] Training-serving skew detection (in progress) + * [ ] Metric production + * [ ] Drift detection +* **Feature Discovery and Governance** + * [x] Python SDK for browsing feature registry + * [x] CLI for browsing feature registry + * [x] Model-centric feature tracking (feature services) + * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) + * [x] Feast Web UI (alpha) + * [ ] REST API for browsing feature registry + * [ ] Feature versioning diff --git a/docs/sources.md b/docs/sources.md deleted file mode 100644 index a76d395d09..0000000000 --- a/docs/sources.md +++ /dev/null @@ -1,2 +0,0 @@ -# Sources - diff --git a/docs/specs/datastore_online_example (1) (1).png b/docs/specs/datastore_online_example (1) (1).png new file mode 100644 index 0000000000..9c9b644d66 Binary files /dev/null and b/docs/specs/datastore_online_example (1) (1).png differ diff --git a/docs/specs/datastore_online_example (1).png b/docs/specs/datastore_online_example (1).png new file mode 100644 index 0000000000..9c9b644d66 Binary files /dev/null and b/docs/specs/datastore_online_example (1).png differ diff --git a/docs/specs/offline_store_format.md b/docs/specs/offline_store_format.md index 6826c50190..ac829dd52f 100644 --- a/docs/specs/offline_store_format.md +++ b/docs/specs/offline_store_format.md @@ -7,8 +7,8 @@ One of the design goals of Feast is being able to plug seamlessly into existing Feast provides first class support for the following data warehouses (DWH) to store feature data offline out of the box: * [BigQuery](https://cloud.google.com/bigquery) -* [Snowflake](https://www.snowflake.com/) (Coming Soon) -* [Redshift](https://aws.amazon.com/redshift/) (Coming Soon) +* [Snowflake](https://www.snowflake.com/) +* [Redshift](https://aws.amazon.com/redshift/) The integration between Feast and the DWH is highly configurable, but at the same time there are some non-configurable implications and assumptions that Feast imposes on table schemas and mapping between database-native types and Feast type system. This is what this document is about. @@ -28,14 +28,14 @@ Feature data is stored in tables in the DWH. There is one DWH table per Feast Fe ## Type mappings #### Pandas types -Here's how Feast types map to Pandas types for Feast APIs that take in or return a Pandas dataframe: +Here's how Feast types map to Pandas types for Feast APIs that take in or return a Pandas dataframe: | Feast Type | Pandas Type | |-------------|--| | Event Timestamp | `datetime64[ns]` | | BYTES | `bytes` | | STRING | `str` , `category`| -| INT32 | `int32`, `uint32` | +| INT32 | `int16`, `uint16`, `int32`, `uint32` | | INT64 | `int64`, `uint64` | | UNIX_TIMESTAMP | `datetime64[ns]`, `datetime64[ns, tz]` | | DOUBLE | `float64` | @@ -80,3 +80,17 @@ Here's how Feast types map to BigQuery types when using BigQuery for offline sto | BOOL\_LIST | `ARRAY`| Values that are not specified by the table above will cause an error on conversion. 
+
+#### Snowflake Types
+
+Here's how Feast types map to Snowflake types when using Snowflake for offline storage.
+See the source mapping here:
+https://docs.snowflake.com/en/user-guide/python-connector-pandas.html#snowflake-to-pandas-data-mapping
+
+| Feast Type | Snowflake Python Type |
+|-------------|--|
+| Event Timestamp | `DATETIME64[NS]` |
+| UNIX_TIMESTAMP | `DATETIME64[NS]` |
+| STRING | `STR` |
+| INT32 | `INT8 / UINT8 / INT16 / UINT16 / INT32 / UINT32` |
+| INT64 | `INT64 / UINT64` |
+| DOUBLE | `FLOAT64` |
diff --git a/docs/specs/redis_online_example (1) (1).png b/docs/specs/redis_online_example (1) (1).png
new file mode 100644
index 0000000000..ac426770ba
Binary files /dev/null and b/docs/specs/redis_online_example (1) (1).png differ
diff --git a/docs/specs/redis_online_example (1).png b/docs/specs/redis_online_example (1).png
new file mode 100644
index 0000000000..ac426770ba
Binary files /dev/null and b/docs/specs/redis_online_example (1).png differ
diff --git a/docs/tutorials/driver-ranking-with-feast.md b/docs/tutorials/driver-ranking-with-feast.md
new file mode 100644
index 0000000000..4ad34cd9c0
--- /dev/null
+++ b/docs/tutorials/driver-ranking-with-feast.md
@@ -0,0 +1,25 @@
+---
+description: >-
+  Making a prediction using a linear regression model is a common use case in
+  ML. This model predicts if a driver will complete a trip based on features
+  ingested into Feast.
+---
+
+# Driver ranking
+
+In this example, you'll learn how to use some of the key functionality in Feast. The tutorial runs in both local mode and on the Google Cloud Platform \(GCP\). For GCP, you must already have access to a GCP project, including read and write permissions to BigQuery.
+
+## [Driver Ranking Example](https://github.com/feast-dev/feast-driver-ranking-tutorial)
+
+This tutorial guides you on how to use Feast with [Scikit-learn](https://scikit-learn.org/stable/). You will learn how to:
+
+* Train a model locally \(on your laptop\) using data from [BigQuery](https://cloud.google.com/bigquery/)
+* Test the model for online inference using [SQLite](https://www.sqlite.org/index.html) \(for fast iteration\)
+* Test the model for online inference using [Firestore](https://firebase.google.com/products/firestore) \(for production use\)
+
+Try it and let us know what you think!
+
+| ![](../.gitbook/assets/colab_logo_32px.png)[ Run in Google Colab ](https://colab.research.google.com/github/feast-dev/feast-driver-ranking-tutorial/blob/master/notebooks/Driver_Ranking_Tutorial.ipynb) | ![](../.gitbook/assets/github-mark-32px.png)[ View Source in Github](https://github.com/feast-dev/feast-driver-ranking-tutorial/blob/master/notebooks/Driver_Ranking_Tutorial.ipynb) |
+| :--- | :--- |
diff --git a/docs/tutorials/driver-stats-on-snowflake.md b/docs/tutorials/driver-stats-on-snowflake.md
new file mode 100644
index 0000000000..01b158cb1a
--- /dev/null
+++ b/docs/tutorials/driver-stats-on-snowflake.md
@@ -0,0 +1,135 @@
+---
+description: >-
+  Initial demonstration of Snowflake as an offline store with Feast, using the Snowflake demo template.
+---
+
+# Driver stats on Snowflake
+
+In the steps below, we will set up a sample Feast project that leverages Snowflake
+as an offline store.
+
+Starting with data in a Snowflake table, we will register that table with the feature store and define features associated with the columns in that table. From there, we will generate historical training data based on those feature definitions and then materialize the latest feature values into the online store.
Lastly, we will retrieve the materialized feature values.
+
+Our template will generate new data containing driver statistics. From there, we will show code snippets that call the offline store to generate training datasets, and then code that calls the online store to serve the latest feature values to models in production.
+
+## Snowflake Offline Store Example
+
+#### Install `feast[snowflake]`
+
+```shell
+pip install 'feast[snowflake]'
+```
+
+#### Get a Snowflake Trial Account (Optional)
+
+[Snowflake Trial Account](http://trial.snowflake.com)
+
+#### Create a feature repository
+
+```shell
+feast init -t snowflake {feature_repo_name}
+Snowflake Deployment URL (exclude .snowflakecomputing.com):
+Snowflake User Name::
+Snowflake Password::
+Snowflake Role Name (Case Sensitive)::
+Snowflake Warehouse Name (Case Sensitive)::
+Snowflake Database Name (Case Sensitive)::
+Should I upload example data to Snowflake (overwrite table)? [Y/n]: Y
+cd {feature_repo_name}
+```
+
+The following files will automatically be created in your project folder:
+
+* feature_store.yaml -- This is your main configuration file
+* driver_repo.py -- This is your main feature definition file
+* test.py -- This is a file to test your feature store configuration
+
+#### Inspect `feature_store.yaml`
+
+Here you will see the information that you entered. This template will use Snowflake as an offline store and SQLite as the online store. The main thing to remember is that, by default, Snowflake objects have ALL CAPS names unless lower case was specified.
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: ...
+registry: ...
+provider: local
+offline_store:
+  type: snowflake.offline
+  account: SNOWFLAKE_DEPLOYMENT_URL #drop .snowflakecomputing.com
+  user: USERNAME
+  password: PASSWORD
+  role: ROLE_NAME #case sensitive
+  warehouse: WAREHOUSE_NAME #case sensitive
+  database: DATABASE_NAME #case sensitive
+```
+{% endcode %}
+
+#### Run our test Python script `test.py`
+
+```shell
+python test.py
+```
+
+## What we did in `test.py`
+
+#### Initialize our Feature Store
+{% code title="test.py" %}
+```python
+from datetime import datetime, timedelta
+
+import pandas as pd
+from driver_repo import driver, driver_stats_fv
+
+from feast import FeatureStore
+
+fs = FeatureStore(repo_path=".")
+
+fs.apply([driver, driver_stats_fv])
+```
+{% endcode %}
+
+#### Create a dummy training dataframe, then call our offline store to add additional columns
+{% code title="test.py" %}
+```python
+entity_df = pd.DataFrame(
+    {
+        "event_timestamp": [
+            pd.Timestamp(dt, unit="ms", tz="UTC").round("ms")
+            for dt in pd.date_range(
+                start=datetime.now() - timedelta(days=3),
+                end=datetime.now(),
+                periods=3,
+            )
+        ],
+        "driver_id": [1001, 1002, 1003],
+    }
+)
+
+features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"]
+
+training_df = fs.get_historical_features(
+    features=features, entity_df=entity_df
+).to_df()
+```
+{% endcode %}
+
+#### Materialize the latest feature values into our online store
+{% code title="test.py" %}
+```python
+fs.materialize_incremental(end_date=datetime.now())
+```
+{% endcode %}
+
+#### Retrieve the latest values from our online store based on our entity key
+{% code title="test.py" %}
+```python
+online_features = fs.get_online_features(
+    features=features,
+    entity_rows=[
+        # {join_key: entity_value}
+        {"driver_id": 1001},
+        {"driver_id": 1002}
+    ],
+).to_dict()
+```
+{% endcode %}
diff --git a/docs/tutorials/fraud-detection.md
b/docs/tutorials/fraud-detection.md
new file mode 100644
index 0000000000..7bdfde760e
--- /dev/null
+++ b/docs/tutorials/fraud-detection.md
@@ -0,0 +1,29 @@
+---
+description: >-
+  Fraud prediction is a common use case in machine learning. This tutorial
+  builds an end-to-end, production-ready system that predicts in real-time
+  whether a transaction made by a user is fraudulent.
+---
+
+# Fraud detection on GCP
+
+Throughout this tutorial, we’ll walk through the creation of a production-ready fraud prediction system. A prediction is made in real-time as the user makes the transaction, so we need to be able to generate a prediction at low latency.
+
+## [Fraud Detection Example](https://github.com/feast-dev/feast-fraud-tutorial)
+
+Our end-to-end example will perform the following workflows:
+
+* Computing and backfilling feature data from raw data
+* Building point-in-time correct training datasets from feature data and training a model
+* Making online predictions from feature data
+
+Here's a high-level picture of our system architecture on Google Cloud Platform \(GCP\):
+
+![](../.gitbook/assets/data-systems-fraud-2x.jpg)
+
+| ![](../.gitbook/assets/colab_logo_32px.png) [Run in Google Colab](https://colab.research.google.com/github/feast-dev/feast-fraud-tutorial/blob/master/notebooks/Fraud_Detection_Tutorial.ipynb) | ![](../.gitbook/assets/github-mark-32px.png)[ View Source on Github](https://github.com/feast-dev/feast-fraud-tutorial/blob/main/notebooks/Fraud_Detection_Tutorial.ipynb) |
+| :--- | :--- |
diff --git a/docs/tutorials/real-time-credit-scoring-on-aws.md b/docs/tutorials/real-time-credit-scoring-on-aws.md
new file mode 100644
index 0000000000..43f8c98133
--- /dev/null
+++ b/docs/tutorials/real-time-credit-scoring-on-aws.md
@@ -0,0 +1,29 @@
+---
+description: >-
+  Credit scoring models are used to approve or reject loan applications. In this
+  tutorial we will build a real-time credit scoring system on AWS.
+---
+
+# Real-time credit scoring on AWS
+
+When individuals apply for loans from banks and other credit providers, the decision to approve a loan application is often made through a statistical model. This model uses information about a customer to determine the likelihood that they will repay or default on a loan, in a process called credit scoring.
+
+In this example, we will demonstrate how a real-time credit scoring system can be built using Feast and Scikit-Learn on AWS, using feature data from S3.
+
+This real-time system accepts a loan request from a customer and responds within 100ms with a decision on whether their loan has been approved or rejected.
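To make that request/response flow concrete, here is a minimal sketch of what the serving path could look like. The feature reference (`zipcode_features:population`), entity key (`zipcode`), and model are hypothetical placeholders, not code from the tutorial itself:

```python
from feast import FeatureStore

store = FeatureStore(repo_path=".")

def score_loan_request(zipcode: int, model) -> bool:
    # Fetch the latest feature values for this applicant from the online store.
    # "zipcode_features:population" is a hypothetical feature reference.
    response = store.get_online_features(
        features=["zipcode_features:population"],
        entity_rows=[{"zipcode": zipcode}],
    ).to_dict()

    # Hand the feature vector to a pre-trained scikit-learn style model.
    prediction = model.predict([[response["population"][0]]])
    return bool(prediction[0])  # True = approve, False = reject
```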
+
+## [Real-time Credit Scoring Example](https://github.com/feast-dev/real-time-credit-scoring-on-aws-tutorial)
+
+This end-to-end tutorial will take you through the following steps:
+
+* Deploying S3 with Parquet as your primary data source, containing both [loan features](https://github.com/feast-dev/real-time-credit-scoring-on-aws-tutorial/blob/22fc6c7272ef033e7ba0afc64ffaa6f6f8fc0277/data/loan_table_sample.csv) and [zip code features](https://github.com/feast-dev/real-time-credit-scoring-on-aws-tutorial/blob/22fc6c7272ef033e7ba0afc64ffaa6f6f8fc0277/data/zipcode_table_sample.csv)
+* Deploying Redshift as the interface Feast uses to build training datasets
+* Registering your features with Feast and configuring DynamoDB for online serving
+* Building a training dataset with Feast to train your credit scoring model
+* Loading feature values from S3 into DynamoDB
+* Making online predictions with your credit scoring model using features from DynamoDB
+
+| ![](../.gitbook/assets/github-mark-32px.png)[ View Source on Github](https://github.com/feast-dev/real-time-credit-scoring-on-aws-tutorial) |
+| :--- |
diff --git a/docs/tutorials/tutorials-overview.md b/docs/tutorials/tutorials-overview.md
new file mode 100644
index 0000000000..32e64071b0
--- /dev/null
+++ b/docs/tutorials/tutorials-overview.md
@@ -0,0 +1,13 @@
+# Overview
+
+These Feast tutorials showcase how to use Feast to simplify end-to-end model training / serving.
+
+{% page-ref page="fraud-detection.md" %}
+
+{% page-ref page="driver-ranking-with-feast.md" %}
+
+{% page-ref page="real-time-credit-scoring-on-aws.md" %}
+
+{% page-ref page="driver-stats-on-snowflake.md" %}
+
+{% page-ref page="validating-historical-features.md" %}
diff --git a/docs/tutorials/validating-historical-features.md b/docs/tutorials/validating-historical-features.md
new file mode 100644
index 0000000000..addd309902
--- /dev/null
+++ b/docs/tutorials/validating-historical-features.md
@@ -0,0 +1,916 @@
+# Validating historical features with Great Expectations
+
+In this tutorial, we will use the public dataset of Chicago taxi trips to present the data validation capabilities of Feast.
+- The original dataset is stored in BigQuery and consists of raw data for each taxi trip (one row per trip) since 2013.
+- We will generate several training datasets (aka historical features in Feast) for different periods and evaluate expectations made on one dataset against another.
+
+Types of features we're ingesting and generating:
+- Features that aggregate raw data with daily intervals (e.g., trips per day, average fare or speed for a specific day, etc.).
+- Features using SQL while pulling data from BigQuery (like total trip time or total miles travelled).
+- Features calculated on the fly when requested, using Feast's on-demand transformations.
+
+Our plan:
+
+0. Prepare environment
+1. Pull data from BigQuery (optional)
+2. Declare & apply features and feature views in Feast
+3. Generate reference dataset
+4. Develop & test profiler function
+5. Run validation on a different dataset using the reference dataset & profiler
+
+
+> The original notebook and datasets for this tutorial can be found on [GitHub](https://github.com/feast-dev/dqm-tutorial).
+
+### 0. Setup
+
+Install the Feast Python SDK and Great Expectations:
+
+
+```python
+!pip install 'feast[ge]'
+```
+
+
+### 1. Dataset preparation (Optional)
+
+**You can skip this step if you don't have a GCP account.
Please use the parquet files that come with this tutorial instead.**
+
+
+```python
+!pip install google-cloud-bigquery
+```
+
+
+```python
+import pyarrow.parquet
+
+from google.cloud.bigquery import Client
+```
+
+
+```python
+bq_client = Client(project='kf-feast')
+```
+
+Running some basic aggregations while pulling data from BigQuery, grouping by taxi_id and day:
+
+
+```python
+data_query = """SELECT
+    taxi_id,
+    TIMESTAMP_TRUNC(trip_start_timestamp, DAY) as day,
+    SUM(trip_miles) as total_miles_travelled,
+    SUM(trip_seconds) as total_trip_seconds,
+    SUM(fare) as total_earned,
+    COUNT(*) as trip_count
+FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
+WHERE
+    trip_miles > 0 AND trip_seconds > 60 AND
+    trip_start_timestamp BETWEEN '2019-01-01' and '2020-12-31' AND
+    trip_total < 1000
+GROUP BY taxi_id, TIMESTAMP_TRUNC(trip_start_timestamp, DAY)"""
+```
+
+
+```python
+driver_stats_table = bq_client.query(data_query).to_arrow()
+
+# Storing the resulting dataset into a parquet file
+pyarrow.parquet.write_table(driver_stats_table, "trips_stats.parquet")
+```
+
+
+```python
+def entities_query(year):
+    return f"""SELECT
+    distinct taxi_id
+FROM `bigquery-public-data.chicago_taxi_trips.taxi_trips`
+WHERE
+    trip_miles > 0 AND trip_seconds > 0 AND
+    trip_start_timestamp BETWEEN '{year}-01-01' and '{year}-12-31'
+"""
+```
+
+
+```python
+entities_2019_table = bq_client.query(entities_query(2019)).to_arrow()
+
+# Storing entities (taxi ids) into a parquet file
+pyarrow.parquet.write_table(entities_2019_table, "entities.parquet")
+```
+
+
+## 2. Declaring features
+
+
+```python
+import pyarrow.parquet
+import pandas as pd
+
+from feast import FeatureView, Entity, FeatureStore, Field
+from feast.types import Float64, Int64
+from feast.value_type import ValueType
+from feast.data_format import ParquetFormat
+from feast.on_demand_feature_view import on_demand_feature_view
+from feast.infra.offline_stores.file_source import FileSource
+from feast.infra.offline_stores.file import SavedDatasetFileStorage
+from datetime import timedelta
+```
+
+
+```python
+batch_source = FileSource(
+    timestamp_field="day",
+    path="trips_stats.parquet",  # using the parquet file that we created in the previous step
+    file_format=ParquetFormat()
+)
+```
+
+
+```python
+taxi_entity = Entity(name='taxi', join_keys=['taxi_id'])
+```
+
+
+```python
+trips_stats_fv = FeatureView(
+    name='trip_stats',
+    entities=['taxi'],
+    features=[
+        Field(name="total_miles_travelled", dtype=Float64),
+        Field(name="total_trip_seconds", dtype=Float64),
+        Field(name="total_earned", dtype=Float64),
+        Field(name="trip_count", dtype=Int64),
+    ],
+    ttl=timedelta(seconds=86400),
+    source=batch_source,
+)
+```
+
+*Read more about feature views in [Feast docs](https://docs.feast.dev/getting-started/concepts/feature-view)*
+
+
+```python
+@on_demand_feature_view(
+    schema=[
+        Field("avg_fare", Float64),
+        Field("avg_speed", Float64),
+        Field("avg_trip_seconds", Float64),
+        Field("earned_per_hour", Float64),
+    ],
+    sources={
+        "stats": trips_stats_fv
+    }
+)
+def on_demand_stats(inp):
+    out = pd.DataFrame()
+    out["avg_fare"] = inp["total_earned"] / inp["trip_count"]
+    out["avg_speed"] = 3600 * inp["total_miles_travelled"] / inp["total_trip_seconds"]
+    out["avg_trip_seconds"] = inp["total_trip_seconds"] / inp["trip_count"]
+    out["earned_per_hour"] = 3600 * inp["total_earned"] / inp["total_trip_seconds"]
+    return out
+```
+
+*Read more about on demand feature views [here](https://docs.feast.dev/reference/alpha-on-demand-feature-view)*
+
+
+```python
+store = FeatureStore(".")  # using the feature_store.yaml stored in the same directory
+```
+
+
+```python
+store.apply([taxi_entity, trips_stats_fv, on_demand_stats])  # writing to the registry
+```
+
+
+## 3. Generating training (reference) dataset
+
+
+```python
+taxi_ids = pyarrow.parquet.read_table("entities.parquet").to_pandas()
+```
+
+Generating a range of timestamps with daily frequency:
+
+
+```python
+timestamps = pd.DataFrame()
+timestamps["event_timestamp"] = pd.date_range("2019-06-01", "2019-07-01", freq='D')
+```
+
+Cross merge (aka relation multiplication) produces an entity dataframe with each taxi_id repeated for each timestamp:
+
+
+```python
+entity_df = pd.merge(taxi_ids, timestamps, how='cross')
+entity_df
+```
+
+|        | taxi_id                                           | event_timestamp |
+|--------|---------------------------------------------------|-----------------|
+| 0      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2019-06-01      |
+| 1      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2019-06-02      |
+| 2      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2019-06-03      |
+| 3      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2019-06-04      |
+| 4      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2019-06-05      |
+| ...    | ...                                               | ...             |
+| 156979 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2019-06-27      |
+| 156980 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2019-06-28      |
+| 156981 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2019-06-29      |
+| 156982 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2019-06-30      |
+| 156983 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2019-07-01      |
+
+156984 rows × 2 columns
+
+Retrieving historical features for the resulting entity dataframe and persisting the output as a saved dataset:
+
+
+```python
+job = store.get_historical_features(
+    entity_df=entity_df,
+    features=[
+        "trip_stats:total_miles_travelled",
+        "trip_stats:total_trip_seconds",
+        "trip_stats:total_earned",
+        "trip_stats:trip_count",
+        "on_demand_stats:avg_fare",
+        "on_demand_stats:avg_trip_seconds",
+        "on_demand_stats:avg_speed",
+        "on_demand_stats:earned_per_hour",
+    ]
+)
+
+store.create_saved_dataset(
+    from_=job,
+    name='my_training_ds',
+    storage=SavedDatasetFileStorage(path='my_training_ds.parquet')
+)
+```
+
+```
+<SavedDataset(name = my_training_ds, ..., full_feature_names = False, tags = {}, _retrieval_job = <...>, min_event_timestamp = 2019-06-01 00:00:00, max_event_timestamp = 2019-07-01 00:00:00)>
+```
+
+
+## 4. Developing dataset profiler
+
+A dataset profiler is a function that accepts a dataset and generates a set of its characteristics. These characteristics will then be used to evaluate (validate) subsequent datasets.
+
+**Important: datasets are not compared to each other!
+Feast uses a reference dataset and a profiler function to generate a reference profile.
+This profile will then be used during validation of the tested dataset.**
+
+
+```python
+import numpy as np
+
+from feast.dqm.profilers.ge_profiler import ge_profiler
+
+from great_expectations.core.expectation_suite import ExpectationSuite
+from great_expectations.dataset import PandasDataset
+```
+
+
+Loading the saved dataset first and exploring the data:
+
+
+```python
+ds = store.get_saved_dataset('my_training_ds')
+ds.to_df()
+```
+
+|        | total_earned | avg_trip_seconds | taxi_id                                           | total_miles_travelled | trip_count | earned_per_hour | event_timestamp           | total_trip_seconds | avg_fare  | avg_speed |
+|--------|--------------|------------------|---------------------------------------------------|-----------------------|------------|-----------------|---------------------------|--------------------|-----------|-----------|
+| 0      | 68.25        | 2270.000000      | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 24.70                 | 2.0        | 54.118943       | 2019-06-01 00:00:00+00:00 | 4540.0             | 34.125000 | 19.585903 |
+| 1      | 221.00       | 560.500000       | 7a4a6162eaf27805aef407d25d5cb21fe779cd962922cb... | 54.18                 | 24.0       | 59.143622       | 2019-06-01 00:00:00+00:00 | 13452.0            | 9.208333  | 14.499554 |
+| 2      | 160.50       | 1010.769231      | f4c9d05b215d7cbd08eca76252dae51cdb7aca9651d4ef... | 41.30                 | 13.0       | 43.972603       | 2019-06-01 00:00:00+00:00 | 13140.0            | 12.346154 | 11.315068 |
+| 3      | 183.75       | 697.550000       | c1f533318f8480a59173a9728ea0248c0d3eb187f4b897... | 37.30                 | 20.0       | 47.415956       | 2019-06-01 00:00:00+00:00 | 13951.0            | 9.187500  | 9.625116  |
+| 4      | 217.75       | 1054.076923      | 455b6b5cae6ca5a17cddd251485f2266d13d6a2c92f07c... | 69.69                 | 13.0       | 57.206451       | 2019-06-01 00:00:00+00:00 | 13703.0            | 16.750000 | 18.308692 |
+| ...    | ...          | ...              | ...                                               | ...                   | ...        | ...             | ...                       | ...                | ...       | ...       |
+| 156979 | 38.00        | 1980.000000      | 0cccf0ec1f46d1e0beefcfdeaf5188d67e170cdff92618... | 14.90                 | 1.0        | 69.090909       | 2019-07-01 00:00:00+00:00 | 1980.0             | 38.000000 | 27.090909 |
+| 156980 | 135.00       | 551.250000       | beefd3462e3f5a8e854942a2796876f6db73ebbd25b435... | 28.40                 | 16.0       | 55.102041       | 2019-07-01 00:00:00+00:00 | 8820.0             | 8.437500  | 11.591837 |
+| 156981 | NaN          | NaN              | 9a3c52aa112f46cf0d129fafbd42051b0fb9b0ff8dcb0e... | NaN                   | NaN        | NaN             | 2019-07-01 00:00:00+00:00 | NaN                | NaN       | NaN       |
+| 156982 | 63.00        | 815.000000       | 08308c31cd99f495dea73ca276d19a6258d7b4c9c88e43... | 19.96                 | 4.0        | 69.570552       | 2019-07-01 00:00:00+00:00 | 3260.0             | 15.750000 | 22.041718 |
+| 156983 | NaN          | NaN              | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | NaN                   | NaN        | NaN             | 2019-07-01 00:00:00+00:00 | NaN                | NaN       | NaN       |
+
+156984 rows × 10 columns
+
+Feast uses [Great Expectations](https://docs.greatexpectations.io/docs/) as a validation engine and [ExpectationSuite](https://legacy.docs.greatexpectations.io/en/latest/autoapi/great_expectations/core/expectation_suite/index.html#great_expectations.core.expectation_suite.ExpectationSuite) as a dataset's profile. Hence, we need to develop a function that will generate an ExpectationSuite. This function will receive an instance of [PandasDataset](https://legacy.docs.greatexpectations.io/en/latest/autoapi/great_expectations/dataset/index.html?highlight=pandasdataset#great_expectations.dataset.PandasDataset) (a wrapper around pandas.DataFrame), so we can utilize both the Pandas DataFrame API and some helper functions from PandasDataset during profiling.
+
+
+```python
+DELTA = 0.1  # controlling allowed window in fraction of the value on scale [0, 1]
+
+@ge_profiler
+def stats_profiler(ds: PandasDataset) -> ExpectationSuite:
+    # simple checks on data consistency
+    ds.expect_column_values_to_be_between(
+        "avg_speed",
+        min_value=0,
+        max_value=60,
+        mostly=0.99  # allow some outliers
+    )
+
+    ds.expect_column_values_to_be_between(
+        "total_miles_travelled",
+        min_value=0,
+        max_value=500,
+        mostly=0.99  # allow some outliers
+    )
+
+    # expectation of means based on observed values
+    observed_mean = ds.trip_count.mean()
+    ds.expect_column_mean_to_be_between("trip_count",
+                                        min_value=observed_mean * (1 - DELTA),
+                                        max_value=observed_mean * (1 + DELTA))
+
+    observed_mean = ds.earned_per_hour.mean()
+    ds.expect_column_mean_to_be_between("earned_per_hour",
+                                        min_value=observed_mean * (1 - DELTA),
+                                        max_value=observed_mean * (1 + DELTA))
+
+    # expectation of quantiles
+    qs = [0.5, 0.75, 0.9, 0.95]
+    observed_quantiles = ds.avg_fare.quantile(qs)
+
+    ds.expect_column_quantile_values_to_be_between(
+        "avg_fare",
+        quantile_ranges={
+            "quantiles": qs,
+            "value_ranges": [[None, max_value] for max_value in observed_quantiles]
+        })
+
+    return ds.get_expectation_suite()
+```
+
+Testing our profiler function:
+
+
+```python
+ds.get_profile(profiler=stats_profiler)
+```
+    02/02/2022 02:43:47 PM INFO: 5 expectation(s) included in expectation_suite. result_format settings filtered.
+
+
+**Verify that all expectations that we coded in our profiler are present here. Otherwise (if you can't find some expectations), it means that they failed to pass on the reference dataset (failing silently is the default behavior of Great Expectations).**
+
+Now we can create a validation reference from the dataset and the profiler function:
+
+
+```python
+validation_reference = ds.as_reference(profiler=stats_profiler)
+```
+
+and test it against our existing retrieval job:
+
+
+```python
+_ = job.to_df(validation_reference=validation_reference)
+```
+
+    02/02/2022 02:43:52 PM INFO: 5 expectation(s) included in expectation_suite. result_format settings filtered.
+    02/02/2022 02:43:53 PM INFO: Validating data_asset_name None with expectation_suite_name default
+
+
+Validation passed successfully, as no exceptions were raised.
+
+
+### 5. Validating new historical retrieval
+
+Creating new timestamps for Dec 2020:
+
+
+```python
+from feast.dqm.errors import ValidationFailed
+```
+
+
+```python
+timestamps = pd.DataFrame()
+timestamps["event_timestamp"] = pd.date_range("2020-12-01", "2020-12-07", freq='D')
+```
+
+
+```python
+entity_df = pd.merge(taxi_ids, timestamps, how='cross')
+entity_df
+```
+
+|       | taxi_id                                           | event_timestamp |
+|-------|---------------------------------------------------|-----------------|
+| 0     | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2020-12-01      |
+| 1     | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2020-12-02      |
+| 2     | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2020-12-03      |
+| 3     | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2020-12-04      |
+| 4     | 91d5288487e87c5917b813ba6f75ab1c3a9749af906a2d... | 2020-12-05      |
+| ...   | ...                                               | ...             |
+| 35443 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2020-12-03      |
+| 35444 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2020-12-04      |
+| 35445 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2020-12-05      |
+| 35446 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2020-12-06      |
+| 35447 | 7ebf27414a0c7b128e7925e1da56d51a8b81484f7630cf... | 2020-12-07      |
+
+35448 rows × 2 columns
+ + +```python +job = store.get_historical_features( + entity_df=entity_df, + features=[ + "trip_stats:total_miles_travelled", + "trip_stats:total_trip_seconds", + "trip_stats:total_earned", + "trip_stats:trip_count", + "on_demand_stats:avg_fare", + "on_demand_stats:avg_trip_seconds", + "on_demand_stats:avg_speed", + "on_demand_stats:earned_per_hour", + ] +) +``` + +Execute retrieval job with validation reference: + + +```python +try: + df = job.to_df(validation_reference=validation_reference) +except ValidationFailed as exc: + print(exc.validation_report) +``` + + 02/02/2022 02:43:58 PM INFO: 5 expectation(s) included in expectation_suite. result_format settings filtered. + 02/02/2022 02:43:59 PM INFO: Validating data_asset_name None with expectation_suite_name default + + [ + { + "expectation_config": { + "expectation_type": "expect_column_mean_to_be_between", + "kwargs": { + "column": "trip_count", + "min_value": 10.387244591346153, + "max_value": 12.695521167200855, + "result_format": "COMPLETE" + }, + "meta": {} + }, + "meta": {}, + "result": { + "observed_value": 6.692920555429092, + "element_count": 35448, + "missing_count": 31055, + "missing_percent": 87.6071992778154 + }, + "exception_info": { + "raised_exception": false, + "exception_message": null, + "exception_traceback": null + }, + "success": false + }, + { + "expectation_config": { + "expectation_type": "expect_column_mean_to_be_between", + "kwargs": { + "column": "earned_per_hour", + "min_value": 52.320624975640214, + "max_value": 63.94743052578249, + "result_format": "COMPLETE" + }, + "meta": {} + }, + "meta": {}, + "result": { + "observed_value": 68.99268345164135, + "element_count": 35448, + "missing_count": 31055, + "missing_percent": 87.6071992778154 + }, + "exception_info": { + "raised_exception": false, + "exception_message": null, + "exception_traceback": null + }, + "success": false + }, + { + "expectation_config": { + "expectation_type": "expect_column_quantile_values_to_be_between", + "kwargs": { + "column": "avg_fare", + "quantile_ranges": { + "quantiles": [ + 0.5, + 0.75, + 0.9, + 0.95 + ], + "value_ranges": [ + [ + null, + 16.4 + ], + [ + null, + 26.229166666666668 + ], + [ + null, + 36.4375 + ], + [ + null, + 42.0 + ] + ] + }, + "result_format": "COMPLETE" + }, + "meta": {} + }, + "meta": {}, + "result": { + "observed_value": { + "quantiles": [ + 0.5, + 0.75, + 0.9, + 0.95 + ], + "values": [ + 19.5, + 28.1, + 38.0, + 44.125 + ] + }, + "element_count": 35448, + "missing_count": 31055, + "missing_percent": 87.6071992778154, + "details": { + "success_details": [ + false, + false, + false, + false + ] + } + }, + "exception_info": { + "raised_exception": false, + "exception_message": null, + "exception_traceback": null + }, + "success": false + } + ] + + +Validation failed since several expectations didn't pass: +* Trip count (mean) decreased more than 10% (which is expected when comparing Dec 2020 vs June 2019) +* Average Fare increased - all quantiles are higher than expected +* Earn per hour (mean) increased more than 10% (most probably due to increased fare) + diff --git a/docs/user-guide/define-and-ingest-features.md b/docs/user-guide/define-and-ingest-features.md deleted file mode 100644 index d55fcb1d85..0000000000 --- a/docs/user-guide/define-and-ingest-features.md +++ /dev/null @@ -1,56 +0,0 @@ -# Define and ingest features - -In order to retrieve features for both training and serving, Feast requires data being ingested into its offline and online stores. 
- -{% hint style="warning" %} -Feast 0.8 does not have an offline store. Only Online storage support exists currently. Feast 0.9 will have offline storage support. In Feast 0.8, historical data is retrieved directly from batch sources. -{% endhint %} - -Users are expected to already have either a batch or stream source with data stored in it, ready to be ingested into Feast. Once a feature table \(with the corresponding sources\) has been registered with Feast, it is possible to load data from this source into stores. - -The following depicts an example ingestion flow from a data source to the online store. - -### Batch Source to Online Store - -```python -from feast import Client -from datetime import datetime, timedelta - -client = Client(core_url="localhost:6565") -driver_ft = client.get_feature_table("driver_trips") - -# Initialize date ranges -today = datetime.now() -yesterday = today - timedelta(1) - -# Launches a short-lived job that ingests data over the provided date range. -client.start_offline_to_online_ingestion( - driver_ft, yesterday, today -) -``` - -### Stream Source to Online Store - -```python -from feast import Client -from datetime import datetime, timedelta - -client = Client(core_url="localhost:6565") -driver_ft = client.get_feature_table("driver_trips") - -# Launches a long running streaming ingestion job -client.start_stream_to_online_ingestion(driver_ft) -``` - -### Batch Source to Offline Store - -{% hint style="danger" %} -Not supported in Feast 0.8 -{% endhint %} - -### Stream Source to Offline Store - -{% hint style="danger" %} -Not supported in Feast 0.8 -{% endhint %} - diff --git a/docs/user-guide/getting-online-features.md b/docs/user-guide/getting-online-features.md deleted file mode 100644 index c16dc08a01..0000000000 --- a/docs/user-guide/getting-online-features.md +++ /dev/null @@ -1,54 +0,0 @@ -# Getting online features - -Feast provides an API through which online feature values can be retrieved. This allows teams to look up feature values at low latency in production during model serving, in order to make online predictions. - -{% hint style="info" %} -Online stores only maintain the current state of features, i.e latest feature values. No historical data is stored or served. -{% endhint %} - -```python -from feast import Client - -online_client = Client( - core_url="localhost:6565", - serving_url="localhost:6566", -) - -entity_rows = [ - {"driver_id": 1001}, - {"driver_id": 1002}, -] - -# Features in format -feature_refs = [ - "driver_trips:average_daily_rides", - "driver_trips:maximum_daily_rides", - "driver_trips:rating", -] - -response = online_client.get_online_features( - feature_refs=feature_refs, # Contains only feature references - entity_rows=entity_rows, # Contains only entities (driver ids) -) - -# Print features in dictionary format -response_dict = response.to_dict() -print(response_dict) -``` - -The online store must be populated through [ingestion jobs](define-and-ingest-features.md#batch-source-to-online-store) prior to being used for online serving. - -Feast Serving provides a [gRPC API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) that is backed by [Redis](https://redis.io/). We have native clients in [Python](https://api.docs.feast.dev/python/), [Go](https://godoc.org/github.com/gojek/feast/sdk/go), and [Java](https://javadoc.io/doc/dev.feast). - -### Online Field Statuses - -Feast also returns status codes when retrieving features from the Feast Serving API. 
diff --git a/docs/user-guide/getting-training-features.md b/docs/user-guide/getting-training-features.md
deleted file mode 100644
index a3897b1019..0000000000
--- a/docs/user-guide/getting-training-features.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Getting training features
-
-Feast provides a historical retrieval interface for exporting feature data in order to train machine learning models. Essentially, users are able to enrich their data with features from any feature table.
-
-### Retrieving historical features
-
-Below is an example of the process required to produce a training dataset:
-
-```python
-# Feature references with target feature
-feature_refs = [
-    "driver_trips:average_daily_rides",
-    "driver_trips:maximum_daily_rides",
-    "driver_trips:rating",
-    "driver_trips:rating:trip_completed",
-]
-
-# Define entity source
-entity_source = FileSource(
-    "event_timestamp",
-    ParquetFormat(),
-    "gs://some-bucket/customer"
-)
-
-# Retrieve historical dataset from Feast.
-historical_feature_retrieval_job = client.get_historical_features(
-    feature_refs=feature_refs,
-    entity_rows=entity_source
-)
-
-output_file_uri = historical_feature_retrieval_job.get_output_file_uri()
-```
-
-#### 1. Define feature references
-
-[Feature references](../concepts/glossary.md#feature-references) define the specific features that will be retrieved from Feast. These features can come from multiple feature tables. The only requirement is that the feature tables that make up the feature references have the same entity \(or composite entity\).
-
-#### 2. Define an entity dataframe
-
-Feast needs to join feature values onto specific entities at specific points in time. Thus, it is necessary to provide an [entity dataframe](../concepts/glossary.md#entity-dataframe) as part of the `get_historical_features` method. In the example above we are defining an entity source. This source is an external file that provides Feast with the entity dataframe.
-
-#### 3. Launch historical retrieval job
-
-Once the feature references and an entity source are defined, it is possible to call `get_historical_features()`. This method launches a job that extracts features from the sources defined in the provided feature tables, joins them onto the provided entity source, and returns a reference to the training dataset that is produced.
-
-Please see the [Feast SDK](https://api.docs.feast.dev/python) for more details.
-
-### Point-in-time Joins
-
-Feast always joins features onto entity data in a point-in-time correct way. The process can be described through an example.
-
-In the example below there are two tables \(or dataframes\):
-
-* The dataframe on the left is the [entity dataframe](../concepts/glossary.md#entity-dataframe) that contains timestamps, entities, and the target variable \(trip\_completed\). This dataframe is provided to Feast through an entity source.
-* The dataframe on the right contains driver features. This dataframe is represented in Feast through a feature table and its accompanying data source\(s\).
-
-The user would like to have the driver features joined onto the entity dataframe to produce a training dataset that contains both the target \(trip\_completed\) and features \(average\_daily\_rides, maximum\_daily\_rides, rating\). This dataset will then be used to train their model.
-
-![](../.gitbook/assets/point_in_time_join%20%281%29%20%282%29%20%282%29%20%283%29%20%283%29%20%283%29%20%283%29%20%281%29.png)
-
-Feast is able to intelligently join feature data with different timestamps to a single entity dataframe. It does this through a point-in-time join as follows:
-
-1. Feast loads the entity dataframe and all feature tables \(driver dataframe\) into the same location. This can either be a database or in memory.
-2. For each [entity row](../concepts/glossary.md#entity-rows) in the [entity dataframe](getting-online-features.md), Feast tries to find feature values in each feature table to join to it. Feast extracts the timestamp and entity key of each row in the entity dataframe and scans backward through the feature table until it finds a matching entity key.
-3. If the event timestamp of the matching entity key within the driver feature table is within the maximum age configured for the feature table, then the features at that entity key are joined onto the entity dataframe. If the event timestamp is outside of the maximum age, then only null values are returned.
-4. If multiple entity keys are found with the same event timestamp, then they are deduplicated by the created timestamp, with newer values taking precedence.
-5. Feast repeats this joining process for all feature tables and returns the resulting dataset.
-
-{% hint style="info" %}
-Point-in-time correct joins attempt to prevent feature leakage by recreating the state of the world at a single point in time, instead of joining features based on exact timestamps only.
-{% endhint %}
-
diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md
deleted file mode 100644
index 2d6eb9981b..0000000000
--- a/docs/user-guide/overview.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# Overview
-
-### Using Feast
-
-Feast development happens through three key workflows:
-
-1. [Define and load feature data into Feast](define-and-ingest-features.md)
-2. [Retrieve historical features for training models](getting-training-features.md)
-3. [Retrieve online features for serving models](getting-online-features.md)
-
-### Defining feature tables and ingesting data into Feast
-
-Feature creators model the data within their organization into Feast through the definition of [feature tables](../concepts/feature-tables.md) that contain [data sources](../concepts/sources.md). Feature tables are both a schema and a means of identifying data sources for features; they allow Feast to know how to interpret your data and where to find it.
-
-After registering a feature table with Feast, users can trigger an ingestion from their data source into Feast. This loads feature values from an upstream data source into Feast stores through ingestion jobs.
-
-Visit [feature tables](../concepts/feature-tables.md#overview) to learn more about them.
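For intuition, the point-in-time join described above can be approximated with plain pandas. The sketch below uses `merge_asof`; the column names, sample values, and the two-day maximum age are illustrative assumptions, and deduplication by created timestamp (step 4) is not modeled:

```python
import pandas as pd

# Entity dataframe: entity keys, event timestamps, and the label.
entity_df = pd.DataFrame({
    "driver_id": [1001, 1002],
    "event_timestamp": pd.to_datetime(["2021-04-12 10:00", "2021-04-12 08:00"]),
    "trip_completed": [1, 0],
}).sort_values("event_timestamp")

# Feature table data: the same entity key with its own event timestamps.
driver_df = pd.DataFrame({
    "driver_id": [1001, 1002],
    "event_timestamp": pd.to_datetime(["2021-04-12 08:00", "2021-04-10 07:00"]),
    "average_daily_rides": [300.5, 250.1],
}).sort_values("event_timestamp")

# For each entity row, join the latest feature row at or before its
# timestamp, but only if that row is within the maximum age (2 days).
# Driver 1002's feature row is older than 2 days, so it comes back null,
# mirroring step 3 above.
training_df = pd.merge_asof(
    entity_df,
    driver_df,
    on="event_timestamp",
    by="driver_id",
    tolerance=pd.Timedelta(days=2),
    direction="backward",
)
print(training_df)
```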
-
-{% page-ref page="define-and-ingest-features.md" %}
-
-### Retrieving historical features for training
-
-In order to generate a training dataset it is necessary to provide both an [entity dataframe](../concepts/glossary.md#entity-dataframe) and feature references through the [Feast SDK](https://api.docs.feast.dev/python/) to retrieve historical features. For historical serving, Feast requires that you provide the entities and timestamps for the corresponding feature data. Feast produces a point-in-time correct dataset using the requested features. These features can be requested from an unlimited number of feature tables.
-
-{% page-ref page="getting-training-features.md" %}
-
-### Retrieving online features for online serving
-
-Online retrieval uses feature references through the [Feast Online Serving API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) to retrieve online features. Online serving allows for very low latency requests to feature data at very high throughput.
-
-{% page-ref page="getting-online-features.md" %}
-
diff --git a/dump.rdb b/dump.rdb
deleted file mode 100644
index a656ac1bba..0000000000
Binary files a/dump.rdb and /dev/null differ
diff --git a/examples/java-demo/README.md b/examples/java-demo/README.md
new file mode 100644
index 0000000000..b908bb7625
--- /dev/null
+++ b/examples/java-demo/README.md
@@ -0,0 +1,162 @@
+
+# Running Feast Java Server with Redis & calling with Python (with registry in GCP)
+
+For this tutorial, we set up Feast with Redis, use the Feast CLI to register and materialize features, and then retrieve features via a Feast Java server deployed in Kubernetes over gRPC.
+> :point_right: For tips on how to run and debug this locally without using Kubernetes, see [java/serving/README.md](https://github.com/feast-dev/feast/blob/master/java/serving/README.md)
+
+## First, let's set up a Redis cluster
+1. Start minikube (`minikube start`)
+2. Use helm to install a default Redis cluster
+   ```bash
+   helm repo add bitnami https://charts.bitnami.com/bitnami
+   helm repo update
+   helm install my-redis bitnami/redis
+   ```
+   ![](redis-screenshot.png)
+3. Port forward Redis so we can materialize features to it
+
+   ```bash
+   kubectl port-forward --namespace default svc/my-redis-master 6379:6379
+   ```
+4. Get your Redis password using the command below (pasted here for convenience). We'll need this to tell Feast how to communicate with the cluster.
+
+   ```bash
+   export REDIS_PASSWORD=$(kubectl get secret --namespace default my-redis -o jsonpath="{.data.redis-password}" | base64 --decode)
+   echo $REDIS_PASSWORD
+   ```
+
+## Next, we set up a local Feast repo
+1. Install Feast with Redis dependencies: `pip install "feast[redis]"`
+2. Make a bucket in GCS (or S3)
+3. The feature repo is already set up here, so you just need to swap in your GCS bucket and Redis credentials.
+   We need to modify the `feature_store.yaml`, which has two fields for you to replace:
+   ```yaml
+   registry: gs://[YOUR BUCKET]/demo-repo/registry.db
+   project: feast_java_demo
+   provider: gcp
+   online_store:
+     type: redis
+     connection_string: localhost:6379,password=[YOUR PASSWORD]
+   offline_store:
+     type: file
+   flags:
+     alpha_features: true
+     on_demand_transforms: true
+   ```
+4. Run `feast apply` to apply your local features to the remote registry
+5. Materialize features to the online store:
+   ```bash
+   CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S")
+   feast materialize-incremental $CURRENT_TIME
+   ```
+
+## Now let's set up the Feast Server
+1. Add the gcp-auth addon to mount GCP credentials:
+   ```bash
+   minikube addons enable gcp-auth
+   ```
+2. Add Feast's Java feature server chart repo
+   ```bash
+   helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
+   helm repo update
+   ```
+3. Modify the application-override.yaml file to have your credentials + bucket location:
+   ```yaml
+   feature-server:
+     application-override.yaml:
+       enabled: true
+       feast:
+         activeStore: online
+         stores:
+           - name: online
+             type: REDIS
+             config:
+               host: my-redis-master
+               port: 6379
+               password: [YOUR PASSWORD]
+   global:
+     registry:
+       path: gs://[YOUR BUCKET]/demo-repo/registry.db
+       cache_ttl_seconds: 60
+     project: feast_java_demo
+   ```
+4. Install the Feast helm chart: `helm install feast-release feast-charts/feast --values application-override.yaml`
+5. (Optional) Check the logs of the server to make sure it's working:
+   ```bash
+   kubectl logs svc/feast-release-feature-server
+   ```
+6. Port forward to expose the gRPC endpoint:
+   ```bash
+   kubectl port-forward svc/feast-release-feature-server 6566:6566
+   ```
+7. Make a gRPC call:
+   - Python example
+     ```bash
+     python test.py
+     ```
+   - gRPC cli:
+
+     ```bash
+     grpc_cli call localhost:6566 GetOnlineFeatures '
+     features {
+       val: "driver_hourly_stats:conv_rate"
+       val: "driver_hourly_stats:acc_rate"
+     }
+     entities {
+       key: "driver_id"
+       value {
+         val {
+           int64_val: 1001
+         }
+         val {
+           int64_val: 1002
+         }
+       }
+     }'
+     ```
+
+   - Response:
+
+     ```bash
+     connecting to localhost:6566
+     metadata {
+       feature_names {
+         val: "driver_hourly_stats:conv_rate"
+         val: "driver_hourly_stats:acc_rate"
+       }
+     }
+     results {
+       values {
+         float_val: 0.812357187
+       }
+       values {
+         float_val: 0.379484832
+       }
+       statuses: PRESENT
+       statuses: PRESENT
+       event_timestamps {
+         seconds: 1631725200
+       }
+       event_timestamps {
+         seconds: 1631725200
+       }
+     }
+     results {
+       values {
+         float_val: 0.840873241
+       }
+       values {
+         float_val: 0.151376978
+       }
+       statuses: PRESENT
+       statuses: PRESENT
+       event_timestamps {
+         seconds: 1631725200
+       }
+       event_timestamps {
+         seconds: 1631725200
+       }
+     }
+     Rpc succeeded with OK status
+
+     ```
\ No newline at end of file
diff --git a/sdk/python/feast/protos/__init__.py b/examples/java-demo/feature_repo/__init__.py
similarity index 100%
rename from sdk/python/feast/protos/__init__.py
rename to examples/java-demo/feature_repo/__init__.py
diff --git a/examples/java-demo/feature_repo/application-override.yaml b/examples/java-demo/feature_repo/application-override.yaml
new file mode 100644
index 0000000000..dbdeda4c04
--- /dev/null
+++ b/examples/java-demo/feature_repo/application-override.yaml
@@ -0,0 +1,17 @@
+feature-server:
+  application-override.yaml:
+    enabled: true
+    feast:
+      activeStore: online
+      stores:
+        - name: online
+          type: REDIS
+          config:
+            host: my-redis-master
+            port: 6379
+            password: [YOUR PASSWORD]
+global:
+  registry:
+    path: gs://[YOUR BUCKET]/demo-repo/registry.db
+    cache_ttl_seconds: 60
+  project: feast_java_demo
diff --git a/examples/java-demo/feature_repo/data/driver_stats_with_string.parquet b/examples/java-demo/feature_repo/data/driver_stats_with_string.parquet
new file mode 100644
index 0000000000..83b8c31aa5
Binary files /dev/null and b/examples/java-demo/feature_repo/data/driver_stats_with_string.parquet differ
diff --git a/examples/java-demo/feature_repo/driver_repo.py b/examples/java-demo/feature_repo/driver_repo.py
new file mode 100644
index 0000000000..ce9469647f
--- /dev/null
+++ b/examples/java-demo/feature_repo/driver_repo.py
@@ -0,0 +1,71 @@
+import pandas as pd
+from feast.data_source import RequestSource
+from feast.field import Field
+from feast.on_demand_feature_view import on_demand_feature_view
+from feast.request_feature_view import RequestFeatureView
+from feast.types import Float32, Float64, Int64, String
+from google.protobuf.duration_pb2 import Duration
+
+from feast import Entity, Feature, FeatureView, FileSource, ValueType
+
+driver_hourly_stats = FileSource(
+    path="data/driver_stats_with_string.parquet",
+    timestamp_field="event_timestamp",
+    created_timestamp_column="created",
+)
+driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",)
+driver_hourly_stats_view = FeatureView(
+    name="driver_hourly_stats",
+    entities=["driver_id"],
+    ttl=Duration(seconds=86400000),
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64),
+        Field(name="string_feature", dtype=String),
+    ],
+    online=True,
+    batch_source=driver_hourly_stats,
+    tags={},
+)
+
+# Define a request data source which encodes features / information only
+# available at request time (e.g. part of the user-initiated HTTP request)
+input_request = RequestSource(
+    name="vals_to_add",
+    schema=[
+        Field(name="val_to_add", dtype=Int64),
+        Field(name="val_to_add_2", dtype=Int64),
+    ],
+)
+
+# Define an on demand feature view which can generate new features based on
+# existing feature views and RequestSource features
+@on_demand_feature_view(
+    inputs={
+        "driver_hourly_stats": driver_hourly_stats_view,
+        "vals_to_add": input_request,
+    },
+    schema=[
+        Field(name="conv_rate_plus_val1", dtype=Float64),
+        Field(name="conv_rate_plus_val2", dtype=Float64),
+    ],
+)
+def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame:
+    df = pd.DataFrame()
+    df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"]
+    df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"]
+    return df
+
+
+# Define a request feature view
+driver_age_request_fv = RequestFeatureView(
+    name="driver_age",
+    request_data_source=RequestSource(
+        name="driver_age",
+        schema=[
+            Field(name="driver_age", dtype=Int64),
+        ],
+    ),
+)
diff --git a/examples/java-demo/feature_repo/feature_store.yaml b/examples/java-demo/feature_repo/feature_store.yaml
new file mode 100644
index 0000000000..91c65b512a
--- /dev/null
+++ b/examples/java-demo/feature_repo/feature_store.yaml
@@ -0,0 +1,11 @@
+registry: gs://[YOUR BUCKET]/demo-repo/registry.db
+project: feast_java_demo
+provider: gcp
+online_store:
+  type: redis
+  connection_string: localhost:6379,password=[YOUR PASSWORD]
+offline_store:
+  type: file
+flags:
+  alpha_features: true
+  on_demand_transforms: true
diff --git a/examples/java-demo/feature_repo/test.py b/examples/java-demo/feature_repo/test.py
new file mode 100644
index 0000000000..f73883019d
--- /dev/null
+++ b/examples/java-demo/feature_repo/test.py
@@ -0,0 +1,28 @@
+import grpc
+from feast.protos.feast.serving.ServingService_pb2 import (
+    FeatureList,
+    GetOnlineFeaturesRequest,
+)
+from feast.protos.feast.serving.ServingService_pb2_grpc import ServingServiceStub
+from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value
+
+
+# Sample logic to fetch from a local gRPC java server deployed at 6566
+def fetch_java():
+    channel = grpc.insecure_channel("localhost:6566")
+    stub = ServingServiceStub(channel)
+    feature_refs = FeatureList(val=["driver_hourly_stats:conv_rate"])
+    entity_rows = {
+        "driver_id": RepeatedValue(
+            
val=[Value(int64_val=driver_id) for driver_id in range(1001, 1003)] + ) + } + + print( + stub.GetOnlineFeatures( + GetOnlineFeaturesRequest(features=feature_refs, entities=entity_rows,) + ) + ) + +if __name__ == "__main__": + fetch_java() diff --git a/examples/java-demo/redis-screenshot.png b/examples/java-demo/redis-screenshot.png new file mode 100644 index 0000000000..489deb699d Binary files /dev/null and b/examples/java-demo/redis-screenshot.png differ diff --git a/examples/quickstart/quickstart.ipynb b/examples/quickstart/quickstart.ipynb new file mode 100644 index 0000000000..60974d2751 --- /dev/null +++ b/examples/quickstart/quickstart.ipynb @@ -0,0 +1,901 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Feast Codelab", + "provenance": [], + "collapsed_sections": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "p5JTeKfCVBZf" + }, + "source": [ + "# Overview\n", + "\n", + "In this tutorial, we use feature stores to generate training data and power online model inference for a ride-sharing driver satisfaction prediction model. Feast addresses several common issues in this flow:\n", + "1. **Training-serving skew and complex data joins:** Feature values often exist across multiple tables. Joining these datasets can be complicated, slow, and error-prone.\n", + " - Feast joins these tables with battle-tested logic that ensures *point-in-time* correctness so future feature values do not leak to models.\n", + " - Feast alerts users to offline / online skew with data quality monitoring. \n", + "2. **Online feature availability:** At inference time, models often need access to features that aren't readily available and need to be precomputed from other datasources. \n", + " - Feast manages deployment to a variety of online stores (e.g. DynamoDB, Redis, Google Cloud Datastore) and ensures necessary features are consistently *available* and *freshly computed* at inference time.\n", + "3. **Feature reusability and model versioning:** Different teams within an organization are often unable to reuse features across projects, resulting in duplicate feature creation logic. Models have data dependencies that need to be versioned, for example when running A/B tests on model versions.\n", + " - Feast enables discovery of and collaboration on previously used features and enables versioning of sets of features (via *feature services*). \n", + " - Feast enables feature transformation so users can re-use transformation logic across online / offline usecases and across models.\n", + "\n", + "We will:\n", + "- Deploy a local feature store with a Parquet file offline store and Sqlite online store.\n", + "- Build a training dataset using our time series features from our Parquet files.\n", + "- Materialize feature values from the offline store into the online store in preparation for low latency serving.\n", + "- Read the latest features from the online store for inference." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9_Y997DzvOMI" + }, + "source": [ + "## Step 1: Install Feast\n", + "\n", + "Install Feast (and Pygments for pretty printing) using pip:\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "rXNMAAJKQPG5", + "outputId": "52297709-380b-4200-8e7c-3d0102a82ea4" + }, + "source": [ + "%%sh\n", + "pip install feast -U -q\n", + "pip install Pygments -q\n", + "echo \"Please restart your runtime now (Runtime -> Restart runtime). This ensures that the correct dependencies are loaded.\"" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Please restart your runtime now (Runtime -> Restart runtime). This ensures that the correct dependencies are loaded.\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "id": "sOX_LwjaAhKz" + }, + "source": [ + "**Reminder**: Please restart your runtime after installing Feast (Runtime -> Restart runtime). This ensures that the correct dependencies are loaded.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "OZetvs5xx4GP" + }, + "source": [ + "## Step 2: Create a feature repository\n", + "\n", + "A feature repository is a directory that contains the configuration of the feature store and individual features. This configuration is written as code (Python/YAML) and it's highly recommended that teams track it centrally using git. See [Feature Repository](https://docs.feast.dev/reference/feature-repository) for a detailed explanation of feature repositories.\n", + "\n", + "The easiest way to create a new feature repository to use the `feast init` command. This creates a scaffolding with initial demo data.\n", + "\n", + "### Demo data scenario \n", + "- We have surveyed some drivers for how satisfied they are with their experience in a ride-sharing app. \n", + "- We want to generate predictions for driver satisfaction for the rest of the users so we can reach out to potentially dissatisfied users." + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "IhirSkgUvYau", + "outputId": "df90af1a-06bd-48a1-94e6-7def19e87d5f" + }, + "source": [ + "!feast init feature_repo" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Feast is an open source project that collects anonymized error reporting and usage statistics. To opt out or learn more see https://docs.feast.dev/reference/usage\n", + "\n", + "Creating a new Feast repository in \u001b[1m\u001b[32m/content/feature_repo\u001b[0m.\n", + "\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "OdTASZPvyKCe" + }, + "source": [ + "### Step 2a: Inspecting the feature repository\n", + "\n", + "Let's take a look at the demo repo itself. 
It breaks down into\n", + "\n", + "\n", + "* `data/` contains raw demo parquet data\n", + "* `example.py` contains demo feature definitions\n", + "* `feature_store.yaml` contains a demo setup configuring where data sources are\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "9jXuzt4ovzA3", + "outputId": "bff15f0c-9f8e-4a3c-e605-5ad84be30709" + }, + "source": [ + "%cd feature_repo\n", + "!ls -R" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content/feature_repo\n", + ".:\n", + "data example.py feature_store.yaml __init__.py\n", + "\n", + "./data:\n", + "driver_stats.parquet\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MJk_WNsbeUP6" + }, + "source": [ + "### Step 2b: Inspecting the project configuration\n", + "Let's inspect the setup of the project in `feature_store.yaml`. The key line defining the overall architecture of the feature store is the **provider**. This defines where the raw data exists (for generating training data & feature values for serving), and where to materialize feature values to in the online store (for serving). \n", + "\n", + "Valid values for `provider` in `feature_store.yaml` are:\n", + "\n", + "* local: use file source with SQLite/Redis\n", + "* gcp: use BigQuery/Snowflake with Google Cloud Datastore/Redis\n", + "* aws: use Redshift/Snowflake with DynamoDB/Redis\n", + "\n", + "Note that there are many other sources Feast works with, including Azure, Hive, Trino, and PostgreSQL via community plugins. See https://docs.feast.dev/getting-started/third-party-integrations for all supported datasources.\n", + "A custom setup can also be made by following https://docs.feast.dev/v/master/how-to-guides/creating-a-custom-provider" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "9_YJ--uYdtcP", + "outputId": "89268e31-6be0-43fb-e576-6d335a2c1dd9" + }, + "source": [ + "!pygmentize feature_store.yaml" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[94mproject\u001b[39;49;00m: feature_repo\n", + "\u001b[94mregistry\u001b[39;49;00m: data/registry.db\n", + "\u001b[94mprovider\u001b[39;49;00m: local\n", + "\u001b[94monline_store\u001b[39;49;00m:\n", + " \u001b[94mpath\u001b[39;49;00m: data/online_store.db\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FnMlk4zshywp" + }, + "source": [ + "### Inspecting the raw data\n", + "\n", + "The raw feature data we have in this demo is stored in a local parquet file. The dataset captures hourly stats of a driver in a ride-sharing app." + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 424 + }, + "id": "sIF2lO59dwzi", + "outputId": "80e798d5-df21-4ebd-de1c-9bde282bd742" + }, + "source": [ + "import pandas as pd\n", + "\n", + "pd.read_parquet(\"data/driver_stats.parquet\")" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + " event_timestamp driver_id conv_rate acc_rate \\\n", + "0 2022-03-31 14:00:00+00:00 1005 0.313336 0.231481 \n", + "1 2022-03-31 15:00:00+00:00 1005 0.959499 0.942614 \n", + "2 2022-03-31 16:00:00+00:00 1005 0.231786 0.313516 \n", + "3 2022-03-31 17:00:00+00:00 1005 0.886911 0.531613 \n", + "4 2022-03-31 18:00:00+00:00 1005 0.574945 0.718223 \n", + "... ... 
... ... ... \n", + "1802 2022-04-15 12:00:00+00:00 1001 0.521622 0.266667 \n", + "1803 2022-04-15 13:00:00+00:00 1001 0.003188 0.535501 \n", + "1804 2021-04-12 07:00:00+00:00 1001 0.709081 0.823138 \n", + "1805 2022-04-08 02:00:00+00:00 1003 0.033297 0.053268 \n", + "1806 2022-04-08 02:00:00+00:00 1003 0.033297 0.053268 \n", + "\n", + " avg_daily_trips created \n", + "0 303 2022-04-15 14:34:10.056 \n", + "1 842 2022-04-15 14:34:10.056 \n", + "2 782 2022-04-15 14:34:10.056 \n", + "3 634 2022-04-15 14:34:10.056 \n", + "4 441 2022-04-15 14:34:10.056 \n", + "... ... ... \n", + "1802 406 2022-04-15 14:34:10.056 \n", + "1803 593 2022-04-15 14:34:10.056 \n", + "1804 997 2022-04-15 14:34:10.056 \n", + "1805 534 2022-04-15 14:34:10.056 \n", + "1806 534 2022-04-15 14:34:10.056 \n", + "\n", + "[1807 rows x 6 columns]" + ], + "text/html": [ + "\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event_timestampdriver_idconv_rateacc_rateavg_daily_tripscreated
02022-03-31 14:00:00+00:0010050.3133360.2314813032022-04-15 14:34:10.056
12022-03-31 15:00:00+00:0010050.9594990.9426148422022-04-15 14:34:10.056
22022-03-31 16:00:00+00:0010050.2317860.3135167822022-04-15 14:34:10.056
32022-03-31 17:00:00+00:0010050.8869110.5316136342022-04-15 14:34:10.056
42022-03-31 18:00:00+00:0010050.5749450.7182234412022-04-15 14:34:10.056
.....................
18022022-04-15 12:00:00+00:0010010.5216220.2666674062022-04-15 14:34:10.056
18032022-04-15 13:00:00+00:0010010.0031880.5355015932022-04-15 14:34:10.056
18042021-04-12 07:00:00+00:0010010.7090810.8231389972022-04-15 14:34:10.056
18052022-04-08 02:00:00+00:0010030.0332970.0532685342022-04-15 14:34:10.056
18062022-04-08 02:00:00+00:0010030.0332970.0532685342022-04-15 14:34:10.056
\n", + "

1807 rows × 6 columns

\n", + "
\n", + " \n", + " \n", + " \n", + "\n", + " \n", + "
\n", + "
\n", + " " + ] + }, + "metadata": {}, + "execution_count": 4 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "rRL8-ubWzUFy" + }, + "source": [ + "## Step 3: Register feature definitions and deploy your feature store\n", + "\n", + "`feast apply` scans python files in the current directory for feature/entity definitions and deploys infrastructure according to `feature_store.yaml`.\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5NS4INL5n7ze" + }, + "source": [ + "### Step 3a: Inspecting feature definitions\n", + "Let's inspect what `example.py` looks like (the only python file in the repo):" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "DPqXCoNpL0SX", + "outputId": "be1308b2-0c83-4dd3-eb88-e79ffcbd20d6" + }, + "source": [ + "!pygmentize -f terminal16m example.py" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[38;2;64;128;128;03m# This is an example feature definition file\u001b[39;00m\n", + "\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mdatetime\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m timedelta\n", + "\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mfeast\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Entity, FeatureView, Field, FileSource, ValueType\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mfeast\u001b[39;00m\u001b[38;2;0;0;255;01m.\u001b[39;00m\u001b[38;2;0;0;255;01mtypes\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Float32, Int64\n", + "\n", + "\u001b[38;2;64;128;128;03m# Read data from parquet files. Parquet is convenient for local development mode. For\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# production, you can use your favorite DWH, such as BigQuery. See Feast documentation\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# for more info.\u001b[39;00m\n", + "driver_hourly_stats \u001b[38;2;102;102;102m=\u001b[39m FileSource(\n", + " path\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33m/content/feature_repo/data/driver_stats.parquet\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", + " timestamp_field\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mevent_timestamp\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", + " created_timestamp_column\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mcreated\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", + ")\n", + "\n", + "\u001b[38;2;64;128;128;03m# Define an entity for the driver. You can think of entity as a primary key used to\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# fetch features.\u001b[39;00m\n", + "driver \u001b[38;2;102;102;102m=\u001b[39m Entity(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, value_type\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mINT64, join_key\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_id\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,)\n", + "\n", + "\u001b[38;2;64;128;128;03m# Our parquet files contain sample data that includes a driver_id column, timestamps and\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# three feature column. 
Here we define a Feature View that will allow us to serve this\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# data to our model online.\u001b[39;00m\n", + "driver_hourly_stats_view \u001b[38;2;102;102;102m=\u001b[39m FeatureView(\n", + " name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_hourly_stats\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", + " entities\u001b[38;2;102;102;102m=\u001b[39m[\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m],\n", + " ttl\u001b[38;2;102;102;102m=\u001b[39mtimedelta(days\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;102;102;102m1\u001b[39m),\n", + " schema\u001b[38;2;102;102;102m=\u001b[39m[\n", + " Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mconv_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mFloat32),\n", + " Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33macc_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mFloat32),\n", + " Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mavg_daily_trips\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mInt64),\n", + " ],\n", + " online\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;0;128;0;01mTrue\u001b[39;00m,\n", + " source\u001b[38;2;102;102;102m=\u001b[39mdriver_hourly_stats,\n", + " tags\u001b[38;2;102;102;102m=\u001b[39m{},\n", + ")\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "im_cc5HdoDno" + }, + "source": [ + "### Step 3b: Applying feature definitions\n", + "Now we run `feast apply` to register the feature views and entities defined in `example.py`, and sets up SQLite online store tables. Note that we had previously specified SQLite as the online store in `feature_store.yaml` by specifying a `local` provider." + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "RYKCKKrcxYZG", + "outputId": "9745d7eb-b4b8-4a43-bf47-189bbf07ae09" + }, + "source": [ + "!feast apply" + ], + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Created entity \u001b[1m\u001b[32mdriver_id\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n", + "\n", + "Created sqlite table \u001b[1m\u001b[32mfeature_repo_driver_hourly_stats\u001b[0m\n", + "\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uV7rtRQgzyf0" + }, + "source": [ + "## Step 4: Generate training data\n", + "\n", + "To train a model, we need features and labels. Often, this label data is stored separately (e.g. you have one table storing user survey results and another set of tables with feature values). \n", + "\n", + "The user can query that table of labels with timestamps and pass that into Feast as an *entity dataframe* for training data generation. In many cases, Feast will also intelligently join relevant tables to create the relevant feature vectors.\n", + "- Note that we include timestamps because want the features for the same driver at various timestamps to be used in a model." 
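+    ,
+    "\n\n",
+    "The `training_df` produced below could then be used to fit a model. A minimal sketch (assuming scikit-learn is available; it is not part of this notebook's setup):\n",
+    "\n",
+    "```python\n",
+    "from sklearn.linear_model import LinearRegression\n",
+    "\n",
+    "target = \"label_driver_reported_satisfaction\"\n",
+    "feature_cols = [\"conv_rate\", \"acc_rate\", \"avg_daily_trips\"]\n",
+    "model = LinearRegression().fit(training_df[feature_cols], training_df[target])\n",
+    "```\n"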
+ ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "C6Fzia7YwBzz", + "outputId": "b99aedae-9c47-4b9f-acdd-cd02e2e091b7" + }, + "source": [ + "from datetime import datetime, timedelta\n", + "import pandas as pd\n", + "\n", + "from feast import FeatureStore\n", + "\n", + "# The entity dataframe is the dataframe we want to enrich with feature values\n", + "entity_df = pd.DataFrame.from_dict(\n", + " {\n", + " \"driver_id\": [1001, 1002, 1003],\n", + " \"label_driver_reported_satisfaction\": [1, 5, 3], \n", + " \"event_timestamp\": [\n", + " datetime.now() - timedelta(minutes=11),\n", + " datetime.now() - timedelta(minutes=36),\n", + " datetime.now() - timedelta(minutes=73),\n", + " ],\n", + " }\n", + ")\n", + "\n", + "store = FeatureStore(repo_path=\".\")\n", + "\n", + "training_df = store.get_historical_features(\n", + " entity_df=entity_df,\n", + " features=[\n", + " \"driver_hourly_stats:conv_rate\",\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"driver_hourly_stats:avg_daily_trips\",\n", + " ],\n", + ").to_df()\n", + "\n", + "print(\"----- Feature schema -----\\n\")\n", + "print(training_df.info())\n", + "\n", + "print()\n", + "print(\"----- Example features -----\\n\")\n", + "print(training_df.head())" + ], + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "----- Feature schema -----\n", + "\n", + "\n", + "Int64Index: 3 entries, 720 to 1081\n", + "Data columns (total 6 columns):\n", + " # Column Non-Null Count Dtype \n", + "--- ------ -------------- ----- \n", + " 0 driver_id 3 non-null int64 \n", + " 1 label_driver_reported_satisfaction 3 non-null int64 \n", + " 2 event_timestamp 3 non-null datetime64[ns, UTC]\n", + " 3 conv_rate 3 non-null float32 \n", + " 4 acc_rate 3 non-null float32 \n", + " 5 avg_daily_trips 3 non-null int32 \n", + "dtypes: datetime64[ns, UTC](1), float32(2), int32(1), int64(2)\n", + "memory usage: 132.0 bytes\n", + "None\n", + "\n", + "----- Example features -----\n", + "\n", + " driver_id label_driver_reported_satisfaction \\\n", + "720 1002 5 \n", + "359 1001 1 \n", + "1081 1003 3 \n", + "\n", + " event_timestamp conv_rate acc_rate avg_daily_trips \n", + "720 2022-04-15 13:58:30.900257+00:00 0.368052 0.417923 346 \n", + "359 2022-04-15 14:23:30.900240+00:00 0.003188 0.535501 593 \n", + "1081 2022-04-15 13:21:30.900260+00:00 0.214944 0.788695 904 \n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ngl7HCtmz3hG" + }, + "source": [ + "## Step 5: Load features into your online store" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "KCXUpiQ_pmDk" + }, + "source": [ + "### Step 5a: Using `feast materialize-incremental`\n", + "\n", + "We now serialize the latest values of features since the beginning of time to prepare for serving (note: `materialize-incremental` serializes all new features since the last `materialize` call)." 
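+    ,
+    "\n\n",
+    "The same backfill can also be driven from Python instead of the CLI; a rough sketch using the `FeatureStore` API (the one-day date range is illustrative):\n",
+    "\n",
+    "```python\n",
+    "from datetime import datetime, timedelta\n",
+    "from feast import FeatureStore\n",
+    "\n",
+    "store = FeatureStore(repo_path=\".\")\n",
+    "store.materialize(\n",
+    "    start_date=datetime.utcnow() - timedelta(days=1),\n",
+    "    end_date=datetime.utcnow(),\n",
+    ")\n",
+    "```\n"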
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "7Z6QxIebAhK5", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "d38a0e0a-2802-4408-ab92-a26baf82752e" + }, + "source": [ + "from datetime import datetime\n", + "!feast materialize-incremental {datetime.now().isoformat()}" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Materializing \u001b[1m\u001b[32m1\u001b[0m feature views to \u001b[1m\u001b[32m2022-04-15 14:34:37+00:00\u001b[0m into the \u001b[1m\u001b[32msqlite\u001b[0m online store.\n", + "\n", + "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2022-04-14 14:34:38+00:00\u001b[0m to \u001b[1m\u001b[32m2022-04-15 14:34:37+00:00\u001b[0m:\n", + "100%|████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 434.17it/s]\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "l7t12bhH4i9H" + }, + "source": [ + "### Step 5b: Inspect materialized features\n", + "\n", + "Note that now there are `online_store.db` and `registry.db`, which store the materialized features and schema information, respectively." + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "aVIgSYhI4cvR", + "outputId": "eada224a-bd86-4933-f0ce-5e66870918cc" + }, + "source": [ + "print(\"--- Data directory ---\")\n", + "!ls data\n", + "\n", + "import sqlite3\n", + "import pandas as pd\n", + "con = sqlite3.connect(\"data/online_store.db\")\n", + "print(\"\\n--- Schema of online store ---\")\n", + "print(\n", + " pd.read_sql_query(\n", + " \"SELECT * FROM feature_repo_driver_hourly_stats\", con).columns.tolist())\n", + "con.close()" + ], + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--- Data directory ---\n", + "driver_stats.parquet online_store.db registry.db\n", + "\n", + "--- Schema of online store ---\n", + "['entity_key', 'feature_name', 'value', 'event_ts', 'created_ts']\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "AWcttaGalzAm" + }, + "source": [ + "### Quick note on entity keys\n", + "Note from the above command that the online store indexes by `entity_key`. \n", + "\n", + "[Entity keys](https://docs.feast.dev/getting-started/concepts/entity#entity-key) include a list of all entities needed (e.g. all relevant primary keys) to generate the feature vector. In this case, this is a serialized version of the `driver_id`. We use this later to fetch all features for a given driver at inference time." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GNecKOaI0J2Z" + }, + "source": [ + "## Step 6: Fetching feature vectors for inference\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "TBFlKRsOAhK8" + }, + "source": [ + "At inference time, we need to quickly read the latest feature values for different drivers (which otherwise might have existed only in batch sources) from the online feature store using `get_online_features()`. These feature vectors can then be fed to the model." 
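+    ,
+    "\n\n",
+    "As a rough sketch (assuming some pre-trained `model` object, which this notebook does not define), the dict returned by the next cell could be consumed like:\n",
+    "\n",
+    "```python\n",
+    "import pandas as pd\n",
+    "\n",
+    "# Build a feature matrix in a fixed column order, then predict.\n",
+    "X = pd.DataFrame(feature_vector)[[\"conv_rate\", \"acc_rate\", \"avg_daily_trips\"]]\n",
+    "# predictions = model.predict(X)  # hypothetical model\n",
+    "```\n"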
+ ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "a-PUsUWUxoH9", + "outputId": "7acecf34-8e5a-4a87-caa8-c2bff39d1376" + }, + "source": [ + "from pprint import pprint\n", + "from feast import FeatureStore\n", + "\n", + "store = FeatureStore(repo_path=\".\")\n", + "\n", + "feature_vector = store.get_online_features(\n", + " features=[\n", + " \"driver_hourly_stats:conv_rate\",\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"driver_hourly_stats:avg_daily_trips\",\n", + " ],\n", + " entity_rows=[\n", + " {\"driver_id\": 1004},\n", + " {\"driver_id\": 1005},\n", + " ],\n", + ").to_dict()\n", + "\n", + "pprint(feature_vector)" + ], + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'acc_rate': [0.29913440346717834, 0.4562472701072693],\n", + " 'avg_daily_trips': [474, 261],\n", + " 'conv_rate': [0.20237785577774048, 0.30597227811813354],\n", + " 'driver_id': [1004, 1005]}\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "lg68gH2sy6H1" + }, + "source": [ + "# Next steps\n", + "\n", + "- Read the [Concepts](https://docs.feast.dev/getting-started/concepts/) page to understand the Feast data model and architecture.\n", + "- Check out our [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview) section for more examples on how to use Feast.\n", + "- Follow our [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) guide for a more in-depth tutorial on using Feast.\n", + "- Join other Feast users and contributors in [Slack](https://slack.feast.dev/) and become part of the community!" + ] + } + ] +} \ No newline at end of file diff --git a/go.mod b/go.mod index 6e71e5b637..4fb5ef8300 100644 --- a/go.mod +++ b/go.mod @@ -1,36 +1,53 @@ module github.com/feast-dev/feast +go 1.17 + require ( - github.com/Masterminds/goutils v1.1.0 // indirect - github.com/Masterminds/semver v1.5.0 // indirect - github.com/Masterminds/sprig v2.22.0+incompatible // indirect - github.com/feast-dev/feast/sdk/go v0.0.0-20200516052424-09ff3dda724c // indirect + github.com/apache/arrow/go/v8 v8.0.0-20220408212425-58fe60f59289 github.com/ghodss/yaml v1.0.0 - github.com/gogo/protobuf v1.3.1 // indirect - github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect - github.com/golang/mock v1.2.0 - github.com/golang/protobuf v1.4.3 - github.com/google/go-cmp v0.5.0 - github.com/huandu/xstrings v1.2.0 // indirect - github.com/lyft/protoc-gen-validate v0.1.0 // indirect - github.com/mitchellh/copystructure v1.0.0 // indirect - github.com/mitchellh/go-homedir v1.1.0 - github.com/mwitkow/go-proto-validators v0.2.0 // indirect - github.com/pseudomuto/protoc-gen-doc v1.3.0 // indirect - github.com/pseudomuto/protokit v0.2.0 // indirect - github.com/spf13/cobra v0.0.4 - github.com/spf13/viper v1.4.0 - github.com/woop/protoc-gen-doc v1.3.0 // indirect - go.opencensus.io v0.22.3 // indirect - golang.org/x/lint v0.0.0-20200302205851-738671d3881b // indirect - golang.org/x/net v0.0.0-20201021035429-f5854403a974 - golang.org/x/tools v0.0.0-20201124005743-911501bfb504 // indirect - google.golang.org/grpc v1.29.1 - google.golang.org/protobuf v1.25.0 // indirect - gopkg.in/russross/blackfriday.v2 v2.0.0 // indirect - gopkg.in/yaml.v2 v2.2.4 - istio.io/gogo-genproto v0.0.0-20191212213402-78a529a42cd8 // indirect - istio.io/tools v0.0.0-20191228030621-c4eb6a11039c // indirect + github.com/go-python/gopy v0.4.0 + 
github.com/go-redis/redis/v8 v8.11.4 + github.com/golang/protobuf v1.5.2 + github.com/google/uuid v1.3.0 + github.com/mattn/go-sqlite3 v1.14.12 + github.com/spaolacci/murmur3 v1.1.0 + github.com/stretchr/testify v1.7.0 + google.golang.org/grpc v1.45.0 + google.golang.org/protobuf v1.28.0 +) + +require ( + github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect + github.com/andybalholm/brotli v1.0.4 // indirect + github.com/apache/thrift v0.15.0 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/goccy/go-json v0.9.6 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/gonuts/commander v0.1.0 // indirect + github.com/gonuts/flag v0.1.0 // indirect + github.com/google/flatbuffers v2.0.6+incompatible // indirect + github.com/klauspost/asmfmt v1.3.2 // indirect + github.com/klauspost/compress v1.15.1 // indirect + github.com/klauspost/cpuid/v2 v2.0.12 // indirect + github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect + github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect + github.com/pierrec/lz4/v4 v4.1.14 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/zeebo/xxh3 v1.0.2 // indirect + golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 // indirect + golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect + golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 // indirect + golang.org/x/sys v0.0.0-20220406163625-3f8b81556e12 // indirect + golang.org/x/text v0.3.7 // indirect + golang.org/x/tools v0.1.10 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac // indirect + gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect ) -go 1.13 +replace github.com/go-python/gopy v0.4.0 => github.com/feast-dev/gopy v0.4.1-0.20220329011409-d705e6cd1d9b diff --git a/go.sum b/go.sum index bd68e3f863..ef9a0c6989 100644 --- a/go.sum +++ b/go.sum @@ -1,460 +1,536 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cuelang.org/go v0.0.14/go.mod h1:gehQASsTv+lFZknWIG0hANGVSBiHD7HyKWmAdEZL3No= -fortio.org/fortio v1.3.1/go.mod h1:Go0fRqoPJ1xy5JOWcS23jyF58byVZxFyEePYsGmCR0k= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= -github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= -github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= -github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/logger v0.1.0/go.mod 
[go.sum diff elided: machine-generated checksum churn accompanying the module dependency changes in this commit. The original one-entry-per-line diff formatting was lost in extraction, so only a summary of the visible hunks is kept here. Removed entries (-) cover dependencies dropped by this change, e.g. github.com/Masterminds/sprig, github.com/spf13/viper, github.com/spf13/cobra, github.com/pseudomuto/protoc-gen-doc, github.com/feast-dev/feast/sdk/go, and the github.com/go-openapi/* set. Added entries (+) pin the new dependency set, e.g. github.com/apache/arrow/go/v8, github.com/go-redis/redis/v8 v8.11.4, github.com/feast-dev/gopy, github.com/mattn/go-sqlite3 v1.14.12, github.com/google/flatbuffers, and github.com/klauspost/compress, along with upgraded golang.org/x/*, github.com/golang/protobuf, and go.opentelemetry.io pins.]
+golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe h1:6fAMxZRR6sl1Uq8U61gxU+kPTs2tR8uOySCbBP7BN/M= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200321134203-328b4cd54aae h1:3tcmuaB7wwSZtelmiv479UjUB+vviwABz7a133ZwOKQ= -golang.org/x/sys v0.0.0-20200321134203-328b4cd54aae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9 h1:YTzHMGlqJu67/uEo1lBv0n3wBXhXNeUbB1XfN2vmTm0= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220406163625-3f8b81556e12 h1:QyVthZKMsyaQwBTJE04jdNN0Pp5Fn9Qga0mrgxyERQM= +golang.org/x/sys v0.0.0-20220406163625-3f8b81556e12/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20161028155119-f51c12702a4d/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190921001708-c4c64cad1fd0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools 
v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -463,167 +539,104 @@ golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59 h1:QjA/9ArTfVTLfEhClDCG7SGrZkZixxWpwNCDiwJfh88= -golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200321014904-268ba720d32c h1:Qp5jXmUCqMiVq4676uW7bY2oskIR1ivTboSMn8qgeX0= -golang.org/x/tools v0.0.0-20200321014904-268ba720d32c/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200321224714-0d839f3cf2ed h1:OCZDlBlLYiUK6T33/8+3BnojrS2W+Dg1rKYJhR89xGE= -golang.org/x/tools v0.0.0-20200321224714-0d839f3cf2ed/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200414032229-332987a829c3 h1:Z68UA+HA9shnGhQbAFXKqL1Rk/tfiTHJ57bNm/MUL/A= -golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200504022951-6b6965ac5dd1 h1:C8rdnd6KieI73Z2Av0sS0t4kW+geIH/M8kNX8Hmvn9E= -golang.org/x/tools v0.0.0-20200504022951-6b6965ac5dd1/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 h1:vmsb6v0zUdmUlXfwKaYrHPPRCV0lHq/IwNIf0ASGjyQ= -golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200519205726-57a9e4404bf7 h1:nm4zDh9WvH4jiuUpMY5RUsvOwrtTVVAsUaCdLW71hfY= -golang.org/x/tools v0.0.0-20200519205726-57a9e4404bf7/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200521211927-2b542361a4fc h1:6m2YO+AmBApbUOmhsghW+IfRyZOY4My4UYvQQrEpHfY= -golang.org/x/tools v0.0.0-20200521211927-2b542361a4fc/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200601175630-2caf76543d99/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200604042327-9b20fe4cabe8 h1:8Xr1qwxn90MXYKftwNxIO2g4J+26naghxFS5rYiTZww= -golang.org/x/tools v0.0.0-20200604042327-9b20fe4cabe8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa h1:mMXQKlWCw9mIWgVLLfiycDZjMHMMYqiuakI4E/l2xcA= -golang.org/x/tools 
v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200923182640-463111b69878 h1:VUw1+Jf6KJPf82mbTQMia6HCnNMv2BbAipkEZ4KTcqQ= -golang.org/x/tools v0.0.0-20200923182640-463111b69878/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20200925191224-5d1fdd8fa346 h1:hzJjkvxUIF3bSt+v8N5tBQNx/605vszZJ+3XsIamzZo= -golang.org/x/tools v0.0.0-20200925191224-5d1fdd8fa346/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20200928201943-a0ef9b62deab h1:CyH2SDm5ATQiX9gtbMYfvNNed97A9v+TJFnUX/fTaJY= -golang.org/x/tools v0.0.0-20200928201943-a0ef9b62deab/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20200929223013-bf155c11ec6f h1:7+Nz9MyPqt2qMCTvNiRy1G0zYfkB7UCa+ayT6uVvbyI= -golang.org/x/tools v0.0.0-20200929223013-bf155c11ec6f/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201001230009-b5b87423c93b h1:07IVqnnzaip3TGyl/cy32V5YP3FguWG4BybYDTBNpm0= -golang.org/x/tools v0.0.0-20201001230009-b5b87423c93b/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201011145850-ed2f50202694 h1:BANdcOVw3KTuUiyfDp7wrzCpkCe8UP3lowugJngxBTg= -golang.org/x/tools v0.0.0-20201011145850-ed2f50202694/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201013053347-2db1cd791039 h1:kLBxO4OPBgPwjg8Vvu+/0DCHIfDwYIGNFcD66NU9kpo= -golang.org/x/tools v0.0.0-20201013053347-2db1cd791039/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201015182029-a5d9e455e9c4 h1:rQWkJiVIyJ3PgiSHL+RXc8xbrK8duU6jG5eeZ9G7nk8= -golang.org/x/tools v0.0.0-20201015182029-a5d9e455e9c4/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201017001424-6003fad69a88 h1:ZB1XYzdDo7c/O48jzjMkvIjnC120Z9/CwgDWhePjQdQ= -golang.org/x/tools v0.0.0-20201017001424-6003fad69a88/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201124005743-911501bfb504 h1:jOKV2ysikH1GANB7t2LotmhyvkkPvl7HQoEXkV6slJA= -golang.org/x/tools v0.0.0-20201124005743-911501bfb504/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0= +gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= +gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= +gonum.org/v1/gonum v0.9.3 h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s= +gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= +gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20170731182057-09f6ed296fc6/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7 h1:ZUjXAXmrAyrmmCPHgCA/vChHcpsX27MZ3yBonD/z1KE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb h1:i1Ppqkc3WQXikh8bXiwHqAN5Rv3/qDCcRk0/Otx73BY= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200319113533-08878b785e9c h1:5aI3/f/3eCZps9xwoEnmgfDJDhMbnJpfqeGpjVNgVEI= -google.golang.org/genproto v0.0.0-20200319113533-08878b785e9c/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587 h1:1Ym+vvUpq1ZHvxzn34gENJX8U4aKO+vhy2P/2+Xl6qQ= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/grpc v1.13.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac h1:qSNTkEN+L2mvWcLgJOR+8bdHX9rN/IdU3A1Ghpfb1Rg= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM= -google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0 h1:AzbTB6ux+okLTzP8Ru1Xs41C303zdcfEht7MQnYJt5A= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.24.0 h1:vb/1TCsVn3DcJlQ0Gs1yB1pKI6Do2/QNwxdKqmc/b0s= -google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0 h1:bO/TA4OxCOummhSf10siHuG7vJOiwh7SpRpFZDkOgl4= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0 h1:qdOKuR/EIArgaWNjetjgTzgVTAZ+S/WXVrq9HW9zimw= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0 
h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0 h1:UhZDfRO8JRQru4/+LlLE0BRKGF8L+PICnvYZmx/fEGA= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inf.v0 v0.9.0/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/natefinch/lumberjack.v2 v2.0.0-20150622162204-20b71e5b60d7/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= -gopkg.in/russross/blackfriday.v2 v2.0.0 h1:+FlnIV8DSQnT7NZ43hcVKcdJdzZoeCmJj4Ql8gq5keA= -gopkg.in/russross/blackfriday.v2 v2.0.0/go.mod h1:6sSBNz/GtOm/pJTuh5UmBK2ZHfmnxGbl2NZg1UliSOI= -gopkg.in/square/go-jose.v2 v2.0.0-20180411045311-89060dee6a84/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 
h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -istio.io/gogo-genproto v0.0.0-20191009201739-17d570f95998 h1:uvW88tRzT6wjR+SdqPoDjR0jjKTYWmjOp2Z1jXs1ZJQ= -istio.io/gogo-genproto v0.0.0-20191009201739-17d570f95998/go.mod h1:OzpAts7jljZceG4Vqi5/zXy/pOg1b209T3jb7Nv5wIs= -istio.io/gogo-genproto v0.0.0-20191212213402-78a529a42cd8 h1:vcvyAXMWwGwTlFdFT36QlzYg5Ggm9p9/FYcMjf2vZnQ= -istio.io/gogo-genproto v0.0.0-20191212213402-78a529a42cd8/go.mod h1:OzpAts7jljZceG4Vqi5/zXy/pOg1b209T3jb7Nv5wIs= -istio.io/tools v0.0.0-20191228030621-c4eb6a11039c h1:PvYMeezZgkWY/17LYGSI5Xog/7IGk9FGXu6QHgn2mVg= -istio.io/tools v0.0.0-20191228030621-c4eb6a11039c/go.mod h1:6u5K87o8AZvfCdPhr0M60yuZR5/4pPCOCiokv7P+I+0= -k8s.io/api v0.0.0-20191004120003-3a12735a829a/go.mod h1:ceHJE/vDjU8jKnRV6Vqn/+vyZmC6NvOluInN+RhQkIs= -k8s.io/api v0.0.0-20191016110408-35e52d86657a/go.mod h1:/L5qH+AD540e7Cetbui1tuJeXdmNhO8jM6VkXeDdDhQ= -k8s.io/apiextensions-apiserver v0.0.0-20191011152811-a1d7614a8e0f/go.mod h1:ANlyWoQfvraFQiqL/eCLO0r19z0pA0J7eXfRx94lj9Y= -k8s.io/apimachinery v0.0.0-20191004115701-31ade1b30762/go.mod h1:Xc10RHc1U+F/e9GCloJ8QAeCGevSVP5xhOhqlE+e1kM= -k8s.io/apimachinery v0.0.0-20191004115801-a2eda9f80ab8/go.mod h1:llRdnznGEAqC3DcNm6yEj472xaFVfLM7hnYofMb12tQ= -k8s.io/apiserver v0.0.0-20191004121824-7b46ba88e365/go.mod h1:EB3CoZ8WNX95G9ftm5Wc/qdc9qqbxgyYHIVJtmiOLUs= -k8s.io/client-go v0.0.0-20191004120415-b2f42092e376/go.mod h1:ksVkYlACXo9hR9AV+cYyCkuWL1xnWcGtAFxsfqMcozg= -k8s.io/client-go v0.0.0-20191016111102-bec269661e48/go.mod h1:hrwktSwYGI4JK+TJA3dMaFyyvHVi/aLarVHpbs8bgCU= -k8s.io/code-generator v0.0.0-20190612205613-18da4a14b22b/go.mod h1:G8bQwmHm2eafm5bgtX67XDZQ8CWKSGu9DekI+yN4Y5I= -k8s.io/component-base v0.0.0-20191004121406-d5138742ad72/go.mod h1:zT8T6A3K4wLlbQkLUC62skjmWoiNJ9B8WUQj3KIvcrQ= -k8s.io/gengo v0.0.0-20190116091435-f8a0810f38af/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/helm v2.12.0+incompatible/go.mod h1:LZzlS4LQBHfciFOurYBFkCMTaZ0D1l+p0teMg7TSULI= -k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v0.3.1/go.mod 
h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v0.4.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= -k8s.io/kube-openapi v0.0.0-20190228160746-b3a7cee44a30/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc= -k8s.io/kube-openapi v0.0.0-20190816220812-743ec37842bf/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= -k8s.io/utils v0.0.0-20190221042446-c2654d5206da/go.mod h1:8k8uAuAQ0rXslZKaEWd0c3oVhZz7sSzSiPnVZayjIX0= -k8s.io/utils v0.0.0-20190801114015-581e00157fb1/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw= -modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk= -modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k= -modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs= -modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I= -sigs.k8s.io/structured-merge-diff v0.0.0-20190302045857-e85c7b244fd2/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= -sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= diff --git a/sdk/python/feast/protos/feast/__init__.py b/go/cmd/server/logging/feature_repo/__init__.py similarity index 100% rename from sdk/python/feast/protos/feast/__init__.py rename to go/cmd/server/logging/feature_repo/__init__.py diff --git a/go/cmd/server/logging/feature_repo/data/online_store.db b/go/cmd/server/logging/feature_repo/data/online_store.db new file mode 100644 index 0000000000..b6ccea139e Binary files /dev/null and b/go/cmd/server/logging/feature_repo/data/online_store.db differ diff --git a/go/cmd/server/logging/feature_repo/driver_stats.parquet b/go/cmd/server/logging/feature_repo/driver_stats.parquet new file mode 100644 index 0000000000..a1e196df26 Binary files /dev/null and b/go/cmd/server/logging/feature_repo/driver_stats.parquet differ diff --git a/sdk/python/feast/templates/aws/example.py b/go/cmd/server/logging/feature_repo/example.py similarity index 80% rename from sdk/python/feast/templates/aws/example.py rename to go/cmd/server/logging/feature_repo/example.py index f9f2b3b6eb..f78470efd5 100644 --- a/sdk/python/feast/templates/aws/example.py +++ b/go/cmd/server/logging/feature_repo/example.py @@ -2,14 +2,14 @@ from google.protobuf.duration_pb2 import Duration -from feast import Entity, Feature, FeatureView, FileSource, ValueType +from feast import Entity, Feature, FeatureView, FileSource, ValueType, FeatureService # Read data from parquet files. Parquet is convenient for local development mode. For # production, you can use your favorite DWH, such as BigQuery. See Feast documentation # for more info. 
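+# NOTE: timestamp_field (renamed from event_timestamp_column) names the event-time column in the parquet file below.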
 driver_hourly_stats = FileSource(
-    path="%PARQUET_PATH%",
-    event_timestamp_column="datetime",
+    path="driver_stats.parquet",
+    timestamp_field="event_timestamp",
     created_timestamp_column="created",
 )
@@ -23,13 +23,18 @@
 driver_hourly_stats_view = FeatureView(
     name="driver_hourly_stats",
     entities=["driver_id"],
-    ttl=Duration(seconds=86400 * 1),
+    ttl=Duration(seconds=86400 * 365 * 10),
     features=[
         Feature(name="conv_rate", dtype=ValueType.FLOAT),
         Feature(name="acc_rate", dtype=ValueType.FLOAT),
         Feature(name="avg_daily_trips", dtype=ValueType.INT64),
     ],
     online=True,
-    input=driver_hourly_stats,
+    batch_source=driver_hourly_stats,
     tags={},
 )
+
+driver_stats_fs = FeatureService(
+    name="test_service",
+    features=[driver_hourly_stats_view]
+)
\ No newline at end of file
diff --git a/go/cmd/server/logging/feature_store.yaml b/go/cmd/server/logging/feature_store.yaml
new file mode 100644
index 0000000000..3b48f43287
--- /dev/null
+++ b/go/cmd/server/logging/feature_store.yaml
@@ -0,0 +1,5 @@
+project: feature_repo
+registry: data/registry.db
+provider: local
+online_store:
+    path: data/online_store.db
\ No newline at end of file
diff --git a/go/cmd/server/logging/filelogstorage.go b/go/cmd/server/logging/filelogstorage.go
new file mode 100644
index 0000000000..19e9569e69
--- /dev/null
+++ b/go/cmd/server/logging/filelogstorage.go
@@ -0,0 +1,86 @@
+package logging
+
+import (
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/apache/arrow/go/v8/parquet"
+	"github.com/apache/arrow/go/v8/parquet/pqarrow"
+	"github.com/feast-dev/feast/go/internal/feast/registry"
+)
+
+type FileLogStorage struct {
+	// Feast project name
+	project string
+	path    string
+}
+
+func GetFileConfig(config *registry.RepoConfig) (*OfflineLogStoreConfig, error) {
+	fileConfig := OfflineLogStoreConfig{
+		storeType: "file",
+	}
+	if offlineStorePath, ok := config.OfflineStore["path"]; ok {
+		path, success := offlineStorePath.(string)
+		if !success {
+			return &fileConfig, fmt.Errorf("path %v cannot be converted to string", offlineStorePath)
+		}
+		fileConfig.path = path
+	} else {
+		return nil, errors.New("need path for file log storage")
+	}
+	return &fileConfig, nil
+}
+
+// This offline store is currently only used for testing. It will be instantiated during go unit tests to log to file
+// and the parquet files will be cleaned up after the test is run.
+func NewFileOfflineStore(project string, offlineStoreConfig *OfflineLogStoreConfig) (*FileLogStorage, error) {
+	store := FileLogStorage{project: project}
+	var absPath string
+	var err error
+	// TODO(kevjumba) remove this default catch.
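+	// If a path was configured it is resolved to an absolute path below; construction fails fast when no path is set.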
+	if offlineStoreConfig.path != "" {
+		absPath, err = filepath.Abs(offlineStoreConfig.path)
+	} else {
+		return nil, errors.New("need path for file log storage")
+	}
+	if err != nil {
+		return nil, err
+	}
+	store.path = absPath
+	return &store, nil
+}
+
+func openLogFile(absPath string) (*os.File, error) {
+	var _, err = os.Stat(absPath)
+
+	// Create the file if it does not exist.
+	if os.IsNotExist(err) {
+		var file, err = os.Create(absPath)
+		if err != nil {
+			return nil, err
+		}
+		return file, nil
+	} else {
+		return nil, fmt.Errorf("path %s already exists", absPath)
+	}
+}
+
+func (f *FileLogStorage) FlushToStorage(tbl array.Table) error {
+	w, err := openLogFile(f.path)
+	if err != nil {
+		return err
+	}
+	var writer io.Writer = w
+	props := parquet.NewWriterProperties(parquet.WithDictionaryDefault(false))
+	arrProps := pqarrow.DefaultWriterProps()
+	err = pqarrow.WriteTable(tbl, writer, 100, props, arrProps)
+	if err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/go/cmd/server/logging/filelogstorage_test.go b/go/cmd/server/logging/filelogstorage_test.go
new file mode 100644
index 0000000000..1da7dd38ad
--- /dev/null
+++ b/go/cmd/server/logging/filelogstorage_test.go
@@ -0,0 +1,70 @@
+package logging
+
+import (
+	"context"
+	"path/filepath"
+	"testing"
+
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/apache/arrow/go/v8/arrow/memory"
+	"github.com/apache/arrow/go/v8/parquet/file"
+	"github.com/apache/arrow/go/v8/parquet/pqarrow"
+	"github.com/feast-dev/feast/go/internal/test"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestFlushToStorage(t *testing.T) {
+	ctx := context.Background()
+	table, expectedSchema, expectedColumns, err := GetTestArrowTableAndExpectedResults()
+	defer table.Release()
+	assert.Nil(t, err)
+	offlineStoreConfig := OfflineLogStoreConfig{
+		storeType: "file",
+		path:      "./log.parquet",
+	}
+	fileStore, err := NewFileOfflineStore("test", &offlineStoreConfig)
+	assert.Nil(t, err)
+	err = fileStore.FlushToStorage(array.Table(table))
+	assert.Nil(t, err)
+	logPath, err := filepath.Abs(offlineStoreConfig.path)
+	assert.Nil(t, err)
+	pf, err := file.OpenParquetFile(logPath, false)
+	assert.Nil(t, err)
+
+	reader, err := pqarrow.NewFileReader(pf, pqarrow.ArrowReadProperties{}, memory.DefaultAllocator)
+	assert.Nil(t, err)
+
+	tbl, err := reader.ReadTable(ctx)
+	assert.Nil(t, err)
+	tr := array.NewTableReader(tbl, -1)
+	defer tbl.Release()
+	defer tr.Release()
+
+	for tr.Next() {
+		rec := tr.Record()
+		assert.NotNil(t, rec)
+		for _, field := range rec.Schema().Fields() {
+			assert.Contains(t, expectedSchema, field.Name)
+			assert.Equal(t, field.Type, expectedSchema[field.Name])
+		}
+		values, err := test.GetProtoFromRecord(rec)
+		assert.Nil(t, err)
+		for name, val := range values {
+			if name == "RequestId" {
+				// Ensure there are request ids in record.
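+				// Request ids vary per run, so only their presence is asserted here, not their exact values.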
+				assert.Greater(t, len(val.Val), 0)
+			} else {
+				assert.Equal(t, len(val.Val), len(expectedColumns[name].Val))
+				for idx, featureVal := range val.Val {
+					assert.Equal(t, featureVal.Val, expectedColumns[name].Val[idx].Val)
+				}
+			}
+		}
+	}
+
+	err = test.CleanUpFile(logPath)
+	assert.Nil(t, err)
+}
diff --git a/go/cmd/server/logging/logging.go b/go/cmd/server/logging/logging.go
new file mode 100644
index 0000000000..010644709a
--- /dev/null
+++ b/go/cmd/server/logging/logging.go
@@ -0,0 +1,392 @@
+package logging
+
+import (
+	"errors"
+	"fmt"
+	"log"
+	"time"
+
+	"github.com/apache/arrow/go/v8/arrow"
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/apache/arrow/go/v8/arrow/memory"
+	"github.com/feast-dev/feast/go/internal/feast"
+	"github.com/feast-dev/feast/go/internal/feast/model"
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	gotypes "github.com/feast-dev/feast/go/types"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+const DEFAULT_LOG_FLUSH_INTERVAL = 100 * time.Millisecond
+const DEFAULT_LOG_INSERT_TIMEOUT = 20 * time.Millisecond
+
+type Log struct {
+	// Example: val{int64_val: 5017}, val{int64_val: 1003}
+	EntityValue []*types.Value
+
+	FeatureValues   []*types.Value
+	FeatureStatuses []serving.FieldStatus
+	EventTimestamps []*timestamppb.Timestamp
+	RequestContext  map[string]*types.Value
+	RequestId       string
+}
+
+type MemoryBuffer struct {
+	featureService *model.FeatureService
+	logs           []*Log
+}
+
+type LoggingService struct {
+	memoryBuffer      *MemoryBuffer
+	logChannel        chan *Log
+	fs                *feast.FeatureStore
+	offlineLogStorage OfflineLogStorage
+	logInsertTTL      time.Duration
+	logFlushInterval  time.Duration
+}
+
+func NewLoggingService(fs *feast.FeatureStore, logChannelCapacity int, featureServiceName string, enableLogProcessing bool) (*LoggingService, error) {
+	var featureService *model.FeatureService = nil
+	var err error
+	if fs != nil {
+		featureService, err = fs.GetFeatureService(featureServiceName)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	loggingService := &LoggingService{
+		logChannel: make(chan *Log, logChannelCapacity),
+		memoryBuffer: &MemoryBuffer{
+			logs:           make([]*Log, 0),
+			featureService: featureService,
+		},
+		fs:               fs,
+		logInsertTTL:     DEFAULT_LOG_INSERT_TIMEOUT,
+		logFlushInterval: DEFAULT_LOG_FLUSH_INTERVAL,
+	}
+
+	if fs != nil {
+		offlineLogStorage, err := NewOfflineStore(fs.GetRepoConfig())
+		if err != nil {
+			return nil, err
+		}
+		loggingService.offlineLogStorage = offlineLogStorage
+	}
+
+	// Start goroutine to process logs
+	if enableLogProcessing {
+		go loggingService.processLogs()
+	}
+	return loggingService, nil
+}
+
+func (s *LoggingService) EmitLog(l *Log) error {
+	select {
+	case s.logChannel <- l:
+		return nil
+	case <-time.After(s.logInsertTTL):
+		return fmt.Errorf("could not add to log channel with capacity %d. Operation timed out. Current log channel length is %d", cap(s.logChannel), len(s.logChannel))
+	}
+}
+
+func (s *LoggingService) processLogs() {
+	// start a periodic flush
+	// TODO(kevjumba): set param so users can configure flushing duration
+	ticker := time.NewTicker(s.logFlushInterval)
+	defer ticker.Stop()
+
+	for {
+		s.PerformPeriodicAppendToMemoryBufferAndLogFlush(ticker)
+	}
+}
+
+// Either ingests one new log from the logging channel into the in-memory buffer, or, on each tick of the
+// flush interval, synchronously flushes all buffered logs to the OfflineStorage.
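+// Note: Go's select does not prioritize between ready cases, so under a constant stream of incoming logs a
+// flush may be briefly delayed until the ticker case is chosen.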
+func (s *LoggingService) PerformPeriodicAppendToMemoryBufferAndLogFlush(t *time.Ticker) {
+	select {
+	case t := <-t.C:
+		s.flushLogsToOfflineStorage(t)
+	case new_log := <-s.logChannel:
+		log.Printf("Adding %s to memory.\n", new_log.FeatureValues)
+		s.memoryBuffer.logs = append(s.memoryBuffer.logs, new_log)
+	}
+}
+
+// Acquires the logging schema from the feature service, converts the memory buffer's rows of logs to columnar
+// form, and flushes them to the offline storage.
+func (s *LoggingService) flushLogsToOfflineStorage(t time.Time) error {
+	offlineStoreType, ok := getOfflineStoreType(s.fs.GetRepoConfig().OfflineStore)
+	if !ok {
+		return fmt.Errorf("could not get offline storage type for config: %s", s.fs.GetRepoConfig().OfflineStore)
+	}
+	if offlineStoreType == "file" {
+		entityMap, featureViews, odfvs, err := s.GetFcos()
+		if err != nil {
+			return err
+		}
+		schema, err := GetSchemaFromFeatureService(s.memoryBuffer.featureService, entityMap, featureViews, odfvs)
+		if err != nil {
+			return err
+		}
+		table, err := ConvertMemoryBufferToArrowTable(s.memoryBuffer, schema)
+		if err != nil {
+			return err
+		}
+		err = s.offlineLogStorage.FlushToStorage(table)
+		if err != nil {
+			return err
+		}
+		s.memoryBuffer.logs = s.memoryBuffer.logs[:0]
+	} else {
+		// Currently don't support any other offline flushing.
+		return errors.New("currently only file type is supported for offline log storage")
+	}
+	return nil
+}
+
+// Takes the memory buffer of logs stored as rows, converts them to columnar format using the schema generated
+// by GetSchemaFromFeatureService, and writes them to an arrow table.
+// Returns an arrow table that contains all of the logs in columnar format.
+func ConvertMemoryBufferToArrowTable(memoryBuffer *MemoryBuffer, fcoSchema *Schema) (array.Table, error) {
+	arrowMemory := memory.NewGoAllocator()
+
+	columnNameToProtoValueArray := make(map[string][]*types.Value)
+	columnNameToStatus := make(map[string][]int32)
+	columnNameToTimestamp := make(map[string][]int64)
+	entityNameToEntityValues := make(map[string][]*types.Value)
+
+	strBuilder := array.NewStringBuilder(arrowMemory)
+
+	for _, l := range memoryBuffer.logs {
+		// EntityTypes maps an entity name to its specific type and also which index in the entityValues array it is.
+		// e.g. if an entity key is {driver_id, customer_id}, then the driver_id entity type would be dtype=int64, index=0.
+		// It's in the order of the entities as given by the schema.
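+		// For example, two buffered logs for an entity key {driver_id} contribute two rows to a single driver_id column.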
+		for idx, joinKey := range fcoSchema.Entities {
+			if _, ok := entityNameToEntityValues[joinKey]; !ok {
+				entityNameToEntityValues[joinKey] = make([]*types.Value, 0)
+			}
+			entityNameToEntityValues[joinKey] = append(entityNameToEntityValues[joinKey], l.EntityValue[idx])
+		}
+
+		// Contains both fv and odfv feature value types => they are processed in the order in which they appear in the featureService
+		for idx, featureName := range fcoSchema.Features {
+			// populate the proto value arrays with values from the memory buffer in separate columns, one for each feature name
+			if _, ok := columnNameToProtoValueArray[featureName]; !ok {
+				columnNameToProtoValueArray[featureName] = make([]*types.Value, 0)
+				columnNameToStatus[featureName] = make([]int32, 0)
+				columnNameToTimestamp[featureName] = make([]int64, 0)
+			}
+			columnNameToProtoValueArray[featureName] = append(columnNameToProtoValueArray[featureName], l.FeatureValues[idx])
+			columnNameToStatus[featureName] = append(columnNameToStatus[featureName], int32(l.FeatureStatuses[idx]))
+			columnNameToTimestamp[featureName] = append(columnNameToTimestamp[featureName], l.EventTimestamps[idx].AsTime().UnixNano()/int64(time.Millisecond))
+		}
+		strBuilder.Append(l.RequestId)
+	}
+
+	fields := make([]arrow.Field, 0)
+	columns := make([]array.Interface, 0)
+	for _, entityName := range fcoSchema.Entities {
+		protoArr := entityNameToEntityValues[entityName]
+		if len(protoArr) == 0 {
+			break
+		}
+		valArrowArray, err := gotypes.ProtoValuesToArrowArray(protoArr, arrowMemory, len(columnNameToProtoValueArray))
+		if err != nil {
+			return nil, err
+		}
+		arrowType, err := gotypes.ValueTypeEnumToArrowType(fcoSchema.EntityTypes[entityName])
+		if err != nil {
+			return nil, err
+		}
+		fields = append(fields, arrow.Field{
+			Name: entityName,
+			Type: arrowType,
+		})
+		columns = append(columns, valArrowArray)
+	}
+
+	for _, featureName := range fcoSchema.Features {
+		protoArr := columnNameToProtoValueArray[featureName]
+		if len(protoArr) == 0 {
+			break
+		}
+		arrowArray, err := gotypes.ProtoValuesToArrowArray(protoArr, arrowMemory, len(columnNameToProtoValueArray))
+		if err != nil {
+			return nil, err
+		}
+		arrowType, err := gotypes.ValueTypeEnumToArrowType(fcoSchema.FeaturesTypes[featureName])
+		if err != nil {
+			return nil, err
+		}
+		fields = append(fields, arrow.Field{
+			Name: featureName,
+			Type: arrowType,
+		})
+		columns = append(columns, arrowArray)
+	}
+	fields = append(fields, arrow.Field{
+		Name: "RequestId",
+		Type: &arrow.StringType{},
+	})
+
+	columns = append(columns, strBuilder.NewArray())
+	schema := arrow.NewSchema(
+		fields,
+		nil,
+	)
+
+	result := array.Record(array.NewRecord(schema, columns, int64(len(memoryBuffer.logs))))
+
+	tbl := array.NewTableFromRecords(schema, []array.Record{result})
+	return array.Table(tbl), nil
+}
+
+type Schema struct {
+	Entities      []string
+	Features      []string
+	EntityTypes   map[string]types.ValueType_Enum
+	FeaturesTypes map[string]types.ValueType_Enum
+}
+
+func GetSchemaFromFeatureService(featureService *model.FeatureService, entityMap map[string]*model.Entity, featureViews []*model.FeatureView, onDemandFeatureViews []*model.OnDemandFeatureView) (*Schema, error) {
+	fvs := make(map[string]*model.FeatureView)
+	odFvs := make(map[string]*model.OnDemandFeatureView)
+
+	joinKeys := make([]string, 0)
+	// All join keys in the featureService are put in here
+	joinKeysSet := make(map[string]interface{})
+	entityJoinKeyToType := make(map[string]types.ValueType_Enum)
+	var entities []string
+	for _, featureView := range featureViews {
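+		// Index the feature views by name for the projection lookup below. Note that entities is reassigned on
+		// each iteration, so it ends up holding the entity list of the last feature view seen.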
fvs[featureView.Base.Name] = featureView + entities = featureView.Entities + } + + for _, onDemandFeatureView := range onDemandFeatureViews { + odFvs[onDemandFeatureView.Base.Name] = onDemandFeatureView + } + + allFeatureTypes := make(map[string]types.ValueType_Enum) + features := make([]string, 0) + for _, featureProjection := range featureService.Projections { + // Create copies of FeatureView that may contains the same *FeatureView but + // each differentiated by a *FeatureViewProjection + featureViewName := featureProjection.Name + if fv, ok := fvs[featureViewName]; ok { + for _, f := range featureProjection.Features { + features = append(features, GetFullFeatureName(featureViewName, f.Name)) + allFeatureTypes[GetFullFeatureName(featureViewName, f.Name)] = f.Dtype + } + for _, entityName := range fv.Entities { + entity := entityMap[entityName] + if joinKeyAlias, ok := featureProjection.JoinKeyMap[entity.JoinKey]; ok { + joinKeysSet[joinKeyAlias] = nil + } else { + joinKeysSet[entity.JoinKey] = nil + } + } + } else if _, ok := odFvs[featureViewName]; ok { + for _, f := range featureProjection.Features { + // TODO(kevjumba) check in test here. + features = append(features, GetFullFeatureName(featureViewName, f.Name)) + allFeatureTypes[GetFullFeatureName(featureViewName, f.Name)] = f.Dtype + } + } else { + return nil, fmt.Errorf("no such feature view found in feature service %s", featureViewName) + } + } + + // Only get entities in the current feature service. + for _, entity := range entities { + if _, ok := joinKeysSet[entity]; ok { + joinKeys = append(joinKeys, entityMap[entity].JoinKey) + entityJoinKeyToType[entityMap[entity].JoinKey] = entityMap[entity].ValueType + } + } + + schema := &Schema{ + Entities: joinKeys, + Features: features, + EntityTypes: entityJoinKeyToType, + FeaturesTypes: allFeatureTypes, + } + return schema, nil +} + +func GetFullFeatureName(featureViewName string, featureName string) string { + return fmt.Sprintf("%s__%s", featureViewName, featureName) +} + +func (s *LoggingService) GetFcos() (map[string]*model.Entity, []*model.FeatureView, []*model.OnDemandFeatureView, error) { + odfvs, err := s.fs.ListOnDemandFeatureViews() + if err != nil { + return nil, nil, nil, err + } + fvs, err := s.fs.ListFeatureViews() + if err != nil { + return nil, nil, nil, err + } + entities, err := s.fs.ListEntities(true) + if err != nil { + return nil, nil, nil, err + } + entityMap := make(map[string]*model.Entity) + for _, entity := range entities { + entityMap[entity.Name] = entity + } + return entityMap, fvs, odfvs, nil +} + +func (l *LoggingService) GenerateLogs(featureService *model.FeatureService, joinKeyToEntityValues map[string][]*types.Value, features []*serving.GetOnlineFeaturesResponse_FeatureVector, requestData map[string]*types.RepeatedValue, requestId string) error { + if len(features) <= 0 { + return nil + } + + entitySet, featureViews, odfvs, err := l.GetFcos() + if err != nil { + return err + } + schema, err := GetSchemaFromFeatureService(featureService, entitySet, featureViews, odfvs) + + if err != nil { + return err + } + + numFeatures := len(schema.Features) + // Should be equivalent to how many entities there are(each feature row has (entity) number of features) + numRows := len(features[0].Values) + + for row_idx := 0; row_idx < numRows; row_idx++ { + featureValueLogRow := make([]*types.Value, numFeatures) + featureStatusLogRow := make([]serving.FieldStatus, numFeatures) + eventTimestampLogRow := make([]*timestamppb.Timestamp, numFeatures) + for idx := 0; idx < 
+			featureValueLogRow[idx] = features[idx].Values[rowIdx]
+			featureStatusLogRow[idx] = features[idx].Statuses[rowIdx]
+			eventTimestampLogRow[idx] = features[idx].EventTimestamps[rowIdx]
+		}
+		valuesPerEntityRow := make([]*types.Value, 0)
+		// Emit the entity values in schema order, which is the order in which
+		// ListEntities returns the entities.
+		for _, joinKey := range schema.Entities {
+			valuesPerEntityRow = append(valuesPerEntityRow, joinKeyToEntityValues[joinKey][rowIdx])
+		}
+		newLog := Log{
+			EntityValue:     valuesPerEntityRow,
+			FeatureValues:   featureValueLogRow,
+			FeatureStatuses: featureStatusLogRow,
+			EventTimestamps: eventTimestampLogRow,
+			RequestId:       requestId,
+		}
+		err := l.EmitLog(&newLog)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
diff --git a/go/cmd/server/logging/logging_test.go b/go/cmd/server/logging/logging_test.go
new file mode 100644
index 0000000000..68da0bf498
--- /dev/null
+++ b/go/cmd/server/logging/logging_test.go
@@ -0,0 +1,402 @@
+package logging
+
+import (
+	"math/rand"
+	"reflect"
+	"testing"
+	"time"
+
+	"github.com/apache/arrow/go/v8/arrow"
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/feast-dev/feast/go/internal/feast/model"
+	"github.com/feast-dev/feast/go/internal/test"
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	gotypes "github.com/feast-dev/feast/go/types"
+	"github.com/stretchr/testify/assert"
+	"google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func TestLoggingChannelTimeout(t *testing.T) {
+	// A nil FeatureStore suffices here; only the log channel behavior is under test.
+	loggingService, err := NewLoggingService(nil, 1, "", false)
+	assert.Nil(t, err)
+	assert.Empty(t, loggingService.memoryBuffer.logs)
+	ts := timestamppb.New(time.Now())
+	newLog := Log{
+		FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT},
+		EventTimestamps: []*timestamppb.Timestamp{ts, ts},
+	}
+	loggingService.EmitLog(&newLog)
+	newTs := timestamppb.New(time.Now())
+
+	newLog2 := Log{
+		FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT},
+		EventTimestamps: []*timestamppb.Timestamp{newTs, newTs},
+	}
+	err = loggingService.EmitLog(&newLog2)
+	// The channel times out and doesn't hang.
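+	// Illustrative note (assuming the second argument to NewLoggingService sets
+	// the log channel capacity): the first EmitLog fills the size-1 channel, so
+	// this second EmitLog is expected to fail fast with a timeout error rather
+	// than block.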
+ assert.NotNil(t, err) +} + +func TestSchemaTypeRetrieval(t *testing.T) { + featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() + entityMap := make(map[string]*model.Entity) + expectedEntityNames := make([]string, 0) + expectedFeatureNames := make([]string, 0) + for _, entity := range entities { + entityMap[entity.Name] = entity + expectedEntityNames = append(expectedEntityNames, entity.Name) + } + for _, featureView := range featureViews { + for _, f := range featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) + } + } + for _, featureView := range odfvs { + for _, f := range featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) + } + } + + schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) + assert.Nil(t, err) + + assert.Equal(t, expectedFeatureNames, schema.Features) + assert.Equal(t, expectedEntityNames, schema.Entities) + for _, entityName := range expectedEntityNames { + assert.Contains(t, schema.EntityTypes, entityName) + } + assert.True(t, reflect.DeepEqual(schema.EntityTypes["driver_id"], types.ValueType_INT64)) + + types := []types.ValueType_Enum{*types.ValueType_INT64.Enum(), *types.ValueType_FLOAT.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum()} + for idx, featureName := range expectedFeatureNames { + assert.Contains(t, schema.FeaturesTypes, featureName) + assert.Equal(t, schema.FeaturesTypes[featureName], types[idx]) + } +} + +func TestSchemaRetrievalIgnoresEntitiesNotInFeatureService(t *testing.T) { + featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() + //Remove entities in featureservice + for _, featureView := range featureViews { + featureView.Entities = []string{} + } + entityMap := make(map[string]*model.Entity) + for _, entity := range entities { + entityMap[entity.Name] = entity + } + schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) + assert.Nil(t, err) + assert.Empty(t, schema.EntityTypes) +} + +func TestSchemaUsesOrderInFeatureService(t *testing.T) { + featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() + expectedEntityNames := make([]string, 0) + expectedFeatureNames := make([]string, 0) + entityMap := make(map[string]*model.Entity) + for _, entity := range entities { + entityMap[entity.Name] = entity + } + for _, entity := range entities { + entityMap[entity.Name] = entity + expectedEntityNames = append(expectedEntityNames, entity.Name) + } + // Source of truth for order of featureNames + for _, featureView := range featureViews { + for _, f := range featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) + } + } + for _, featureView := range odfvs { + for _, f := range featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) + } + } + + rand.Seed(time.Now().UnixNano()) + // Shuffle the featureNames in incorrect order + for _, featureView := range featureViews { + rand.Shuffle(len(featureView.Base.Features), func(i, j int) { + featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] + }) + } + for _, featureView := range 
odfvs { + rand.Shuffle(len(featureView.Base.Features), func(i, j int) { + featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] + }) + } + + schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) + assert.Nil(t, err) + + // Ensure the same results + assert.Equal(t, expectedFeatureNames, schema.Features) + assert.Equal(t, expectedEntityNames, schema.Entities) + for _, entityName := range expectedEntityNames { + assert.Contains(t, schema.EntityTypes, entityName) + } + assert.True(t, reflect.DeepEqual(schema.EntityTypes["driver_id"], types.ValueType_INT64)) + + types := []types.ValueType_Enum{*types.ValueType_INT64.Enum(), *types.ValueType_FLOAT.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum()} + for idx, featureName := range expectedFeatureNames { + assert.Contains(t, schema.FeaturesTypes, featureName) + assert.Equal(t, schema.FeaturesTypes[featureName], types[idx]) + } +} + +func TestSerializeToArrowTable(t *testing.T) { + table, expectedSchema, expectedColumns, err := GetTestArrowTableAndExpectedResults() + assert.Nil(t, err) + defer table.Release() + tr := array.NewTableReader(table, -1) + + defer tr.Release() + for tr.Next() { + rec := tr.Record() + assert.NotNil(t, rec) + for _, field := range rec.Schema().Fields() { + assert.Contains(t, expectedSchema, field.Name) + assert.Equal(t, field.Type, expectedSchema[field.Name]) + } + values, err := test.GetProtoFromRecord(rec) + + assert.Nil(t, err) + for name, val := range values { + if name == "RequestId" { + continue + } + assert.Equal(t, len(val.Val), len(expectedColumns[name].Val)) + for idx, featureVal := range val.Val { + assert.Equal(t, featureVal.Val, expectedColumns[name].Val[idx].Val) + } + } + } +} + +// Initialize all dummy featureservice, entities and featureviews/on demand featureviews for testing. 
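+// The fixture consists of one entity (driver_id, INT64), two regular feature
+// views (featureView1 with int64/float32 features, featureView2 with
+// int32/double features) and one on-demand feature view (od_bf1 with
+// odfv_f1/odfv_f2), all exposed through the "test_service" feature service.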
+func InitializeFeatureRepoVariablesForTest() (*model.FeatureService, []*model.Entity, []*model.FeatureView, []*model.OnDemandFeatureView) { + f1 := test.CreateNewFeature( + "int64", + types.ValueType_INT64, + ) + f2 := test.CreateNewFeature( + "float32", + types.ValueType_FLOAT, + ) + projection1 := test.CreateNewFeatureViewProjection( + "featureView1", + "", + []*model.Feature{f1, f2}, + map[string]string{}, + ) + baseFeatureView1 := test.CreateBaseFeatureView( + "featureView1", + []*model.Feature{f1, f2}, + projection1, + ) + featureView1 := test.CreateFeatureView(baseFeatureView1, nil, []string{"driver_id"}) + entity1 := test.CreateNewEntity("driver_id", types.ValueType_INT64, "driver_id") + f3 := test.CreateNewFeature( + "int32", + types.ValueType_INT32, + ) + f4 := test.CreateNewFeature( + "double", + types.ValueType_DOUBLE, + ) + projection2 := test.CreateNewFeatureViewProjection( + "featureView2", + "", + []*model.Feature{f3, f4}, + map[string]string{}, + ) + baseFeatureView2 := test.CreateBaseFeatureView( + "featureView2", + []*model.Feature{f3, f4}, + projection2, + ) + featureView2 := test.CreateFeatureView(baseFeatureView2, nil, []string{"driver_id"}) + + f5 := test.CreateNewFeature( + "odfv_f1", + types.ValueType_INT32, + ) + f6 := test.CreateNewFeature( + "odfv_f2", + types.ValueType_DOUBLE, + ) + projection3 := test.CreateNewFeatureViewProjection( + "od_bf1", + "", + []*model.Feature{f5, f6}, + map[string]string{}, + ) + od_bf1 := test.CreateBaseFeatureView( + "od_bf1", + []*model.Feature{f5, f6}, + projection3, + ) + odfv := model.NewOnDemandFeatureViewFromBase(od_bf1) + featureService := test.CreateNewFeatureService( + "test_service", + "test_project", + nil, + nil, + []*model.FeatureViewProjection{projection1, projection2, projection3}, + ) + return featureService, []*model.Entity{entity1}, []*model.FeatureView{featureView1, featureView2}, []*model.OnDemandFeatureView{odfv} +} + +// Create dummy FeatureService, Entities, and FeatureViews add them to the logger and convert the logs to Arrow table. +// Returns arrow table, expected test schema, and expected columns. 
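+// The expected-columns map is keyed by fully qualified feature name, e.g.
+// "featureView1__int64", following the "<feature view>__<feature>" convention
+// of GetFullFeatureName.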
+func GetTestArrowTableAndExpectedResults() (array.Table, map[string]arrow.DataType, map[string]*types.RepeatedValue, error) { + featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() + entityMap := make(map[string]*model.Entity) + for _, entity := range entities { + entityMap[entity.Name] = entity + } + schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) + if err != nil { + return nil, nil, nil, err + } + + ts := timestamppb.New(time.Now()) + log1 := Log{ + EntityValue: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: 1001}}, + }, + FeatureValues: []*types.Value{ + /* normal feature values */ + {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, + {Val: &types.Value_FloatVal{FloatVal: rand.Float32()}}, + {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, + {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, + /* odfv values */ + {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, + {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, + }, + FeatureStatuses: []serving.FieldStatus{ + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + }, + EventTimestamps: []*timestamppb.Timestamp{ + ts, ts, ts, ts, ts, ts, + }, + } + log2 := Log{ + EntityValue: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: 1003}}, + }, + FeatureValues: []*types.Value{ + /* normal feature values */ + {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, + {Val: &types.Value_FloatVal{FloatVal: rand.Float32()}}, + {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, + {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, + /* odfv values */ + {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, + {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, + }, + FeatureStatuses: []serving.FieldStatus{ + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + }, + EventTimestamps: []*timestamppb.Timestamp{ + ts, ts, ts, ts, ts, ts, + }, + } + + expectedSchema := make(map[string]arrow.DataType) + for joinKey, entityType := range schema.EntityTypes { + arrowType, err := gotypes.ValueTypeEnumToArrowType(entityType) + if err != nil { + return nil, nil, nil, err + } + expectedSchema[joinKey] = arrowType + } + expectedSchema["RequestId"] = arrow.BinaryTypes.String + for featureName, featureType := range schema.FeaturesTypes { + arrowType, err := gotypes.ValueTypeEnumToArrowType(featureType) + if err != nil { + return nil, nil, nil, err + } + expectedSchema[featureName] = arrowType + } + + expectedColumns := map[string]*types.RepeatedValue{ + "driver_id": { + Val: []*types.Value{ + log1.EntityValue[0], + log2.EntityValue[0]}, + }, + "featureView1__int64": { + Val: []*types.Value{ + log1.FeatureValues[0], + log2.FeatureValues[0]}, + }, + "featureView1__float32": { + Val: []*types.Value{ + log1.FeatureValues[1], + log2.FeatureValues[1]}, + }, + "featureView2__int32": { + Val: []*types.Value{ + log1.FeatureValues[2], + log2.FeatureValues[2]}, + }, + "featureView2__double": { + Val: []*types.Value{ + log1.FeatureValues[3], + log2.FeatureValues[3]}, + }, + "od_bf1__odfv_f1": { + Val: []*types.Value{ + log1.FeatureValues[4], + log2.FeatureValues[4]}, + }, + "od_bf1__odfv_f2": { + Val: []*types.Value{ + log1.FeatureValues[5], + log2.FeatureValues[5]}, + }, + } + 
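+	// Route both logs through a real LoggingService so that the Arrow conversion
+	// below exercises the same memory-buffer path used by production logging.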
+	loggingService, err := SetupLoggingServiceWithLogs([]*Log{&log1, &log2})
+	if err != nil {
+		return nil, nil, nil, err
+	}
+
+	table, err := ConvertMemoryBufferToArrowTable(loggingService.memoryBuffer, schema)
+	if err != nil {
+		return nil, nil, nil, err
+	}
+	return table, expectedSchema, expectedColumns, nil
+}
+
+func SetupLoggingServiceWithLogs(logs []*Log) (*LoggingService, error) {
+	loggingService, err := NewLoggingService(nil, len(logs), "", false)
+	if err != nil {
+		return nil, err
+	}
+	dummyTicker := time.NewTicker(10 * time.Second)
+	// Stop the ticker so that the logs are not flushed to offline storage.
+	dummyTicker.Stop()
+	for _, log := range logs {
+		loggingService.EmitLog(log)
+	}
+	// Flush the emitted logs into the memory buffer manually, one per log.
+	for i := 0; i < len(logs); i++ {
+		loggingService.PerformPeriodicAppendToMemoryBufferAndLogFlush(dummyTicker)
+	}
+	return loggingService, nil
+}
diff --git a/go/cmd/server/logging/offlinelogstorage.go b/go/cmd/server/logging/offlinelogstorage.go
new file mode 100644
index 0000000000..1a0f414255
--- /dev/null
+++ b/go/cmd/server/logging/offlinelogstorage.go
@@ -0,0 +1,46 @@
+package logging
+
+import (
+	"errors"
+
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/feast-dev/feast/go/internal/feast/registry"
+)
+
+type OfflineLogStoreConfig struct {
+	storeType string
+	project   string
+	path      string
+}
+
+type OfflineLogStorage interface {
+	// TODO: Consider requiring implementations to expose a method that retrieves
+	// the correct config for their store type.
+	FlushToStorage(array.Table) error
+}
+
+func getOfflineStoreType(offlineStoreConfig map[string]interface{}) (string, bool) {
+	if storeType, ok := offlineStoreConfig["storeType"]; !ok {
+		// No store type specified; treat the offline log store as unconfigured.
+		return "", true
+	} else {
+		result, ok := storeType.(string)
+		return result, ok
+	}
+}
+
+func NewOfflineStore(config *registry.RepoConfig) (OfflineLogStorage, error) {
+	offlineStoreType, _ := getOfflineStoreType(config.OfflineStore)
+	if offlineStoreType == "" {
+		// No offline store configured.
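+		// Note: callers receive a nil OfflineLogStorage (with a nil error) in this
+		// case and are expected to treat offline log flushing as disabled.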
+		return nil, nil
+	} else if offlineStoreType == "file" {
+		fileConfig, err := GetFileConfig(config)
+		if err != nil {
+			return nil, err
+		}
+		offlineStore, err := NewFileOfflineStore(config.Project, fileConfig)
+		return offlineStore, err
+	} else {
+		return nil, errors.New("no offline storage besides file is currently supported")
+	}
+}
diff --git a/go/cmd/server/main.go b/go/cmd/server/main.go
new file mode 100644
index 0000000000..33d56e0a7a
--- /dev/null
+++ b/go/cmd/server/main.go
@@ -0,0 +1,74 @@
+package main
+
+import (
+	"log"
+	"net"
+	"os"
+
+	"github.com/feast-dev/feast/go/cmd/server/logging"
+	"github.com/feast-dev/feast/go/internal/feast"
+	"github.com/feast-dev/feast/go/internal/feast/registry"
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"google.golang.org/grpc"
+)
+
+const (
+	flagFeastRepoPath   = "FEAST_REPO_PATH"
+	flagFeastRepoConfig = "FEAST_REPO_CONFIG"
+	flagFeastSockFile   = "FEAST_GRPC_SOCK_FILE"
+	feastServerVersion  = "0.18.0"
+)
+
+// TODO: Add a proper logging library such as https://github.com/Sirupsen/logrus
+func main() {
+	repoPath := os.Getenv(flagFeastRepoPath)
+	repoConfigJSON := os.Getenv(flagFeastRepoConfig)
+	sockFile := os.Getenv(flagFeastSockFile)
+	if repoPath == "" && repoConfigJSON == "" {
+		log.Fatalf("One of the %s or %s environment variables must be set", flagFeastRepoPath, flagFeastRepoConfig)
+	}
+
+	var repoConfig *registry.RepoConfig
+	var err error
+	if repoConfigJSON != "" {
+		repoConfig, err = registry.NewRepoConfigFromJSON(repoPath, repoConfigJSON)
+		if err != nil {
+			log.Fatalln(err)
+		}
+	} else {
+		repoConfig, err = registry.NewRepoConfigFromFile(repoPath)
+		if err != nil {
+			log.Fatalln(err)
+		}
+	}
+
+	log.Println("Initializing feature store...")
+	fs, err := feast.NewFeatureStore(repoConfig, nil)
+	if err != nil {
+		log.Fatalln(err)
+	}
+	// Logging is disabled for now.
+	loggingService, err := logging.NewLoggingService(fs, 1000, "", false)
+	if err != nil {
+		log.Fatalln(err)
+	}
+	defer fs.DestructOnlineStore()
+	startGrpcServer(fs, loggingService, sockFile)
+}
+
+func startGrpcServer(fs *feast.FeatureStore, loggingService *logging.LoggingService, sockFile string) {
+	server := newServingServiceServer(fs, loggingService)
+	log.Printf("Starting a gRPC server listening on %s\n", sockFile)
+	lis, err := net.Listen("unix", sockFile)
+	if err != nil {
+		log.Fatalln(err)
+	}
+	grpcServer := grpc.NewServer()
+	defer grpcServer.Stop()
+	serving.RegisterServingServiceServer(grpcServer, server)
+	err = grpcServer.Serve(lis)
+	if err != nil {
+		log.Fatalln(err)
+	}
+}
diff --git a/go/cmd/server/server.go b/go/cmd/server/server.go
new file mode 100644
index 0000000000..3708689268
--- /dev/null
+++ b/go/cmd/server/server.go
@@ -0,0 +1,86 @@
+package main
+
+import (
+	"context"
+
+	"github.com/feast-dev/feast/go/cmd/server/logging"
+	"github.com/feast-dev/feast/go/internal/feast"
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	prototypes "github.com/feast-dev/feast/go/protos/feast/types"
+	"github.com/feast-dev/feast/go/types"
+	"github.com/google/uuid"
+)
+
+type servingServiceServer struct {
+	fs             *feast.FeatureStore
+	loggingService *logging.LoggingService
+	serving.UnimplementedServingServiceServer
+}
+
+func newServingServiceServer(fs *feast.FeatureStore, loggingService *logging.LoggingService) *servingServiceServer {
+	return &servingServiceServer{fs: fs, loggingService: loggingService}
+}
+
+func (s *servingServiceServer) GetFeastServingInfo(ctx context.Context, request *serving.GetFeastServingInfoRequest) (*serving.GetFeastServingInfoResponse, error) {
+	return &serving.GetFeastServingInfoResponse{
+		Version: feastServerVersion,
+	}, nil
+}
+
+// GetOnlineFeatures returns feature values for the requested features or feature service.
+// Metadata lists the feature names in the same order as the columns of response.Results.
+// Each entry in Results holds the feature's values, event timestamps, and feature
+// statuses in a columnar format.
+func (s *servingServiceServer) GetOnlineFeatures(ctx context.Context, request *serving.GetOnlineFeaturesRequest) (*serving.GetOnlineFeaturesResponse, error) {
+	requestId := GenerateRequestId()
+	featuresOrService, err := s.fs.ParseFeatures(request.GetKind())
+	if err != nil {
+		return nil, err
+	}
+	featureVectors, err := s.fs.GetOnlineFeatures(
+		ctx,
+		featuresOrService.FeaturesRefs,
+		featuresOrService.FeatureService,
+		request.GetEntities(),
+		request.GetRequestContext(),
+		request.GetFullFeatureNames())
+	if err != nil {
+		return nil, err
+	}
+
+	resp := &serving.GetOnlineFeaturesResponse{
+		Results: make([]*serving.GetOnlineFeaturesResponse_FeatureVector, 0),
+		Metadata: &serving.GetOnlineFeaturesResponseMetadata{
+			FeatureNames: &serving.FeatureList{Val: make([]string, 0)},
+		},
+	}
+	// Entities are currently returned as part of the feature vectors, in the
+	// order in which they are appended to the response metadata.
+	// TODO: find a more robust way to map entities to their ordering.
+	entityValuesMap := make(map[string][]*prototypes.Value)
+	featureNames := make([]string, len(featureVectors))
+	for idx, vector := range featureVectors {
+		resp.Metadata.FeatureNames.Val = append(resp.Metadata.FeatureNames.Val, vector.Name)
+		featureNames[idx] = vector.Name
+		values, err := types.ArrowValuesToProtoValues(vector.Values)
+		if err != nil {
+			return nil, err
+		}
+		if _, ok := request.Entities[vector.Name]; ok {
+			entityValuesMap[vector.Name] = values
+		}
+
+		resp.Results = append(resp.Results, &serving.GetOnlineFeaturesResponse_FeatureVector{
+			Values:          values,
+			Statuses:        vector.Statuses,
+			EventTimestamps: vector.Timestamps,
+		})
+	}
+	if featuresOrService.FeatureService != nil {
+		go s.loggingService.GenerateLogs(featuresOrService.FeatureService, entityValuesMap, resp.Results[len(request.Entities):], request.RequestContext, requestId)
+	}
+	return resp, nil
+}
+
+func GenerateRequestId() string {
+	id := uuid.New()
+	return id.String()
+}
diff --git a/go/cmd/server/server_test.go b/go/cmd/server/server_test.go
new file mode 100644
index 0000000000..9d4ffb50bf
--- /dev/null
+++ b/go/cmd/server/server_test.go
@@ -0,0 +1,279 @@
+package main
+
+import (
+	"context"
+	"net"
+	"os"
+	"path/filepath"
+	"reflect"
+	"runtime"
+	"testing"
+	"time"
+
+	"github.com/feast-dev/feast/go/internal/feast/registry"
+
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/apache/arrow/go/v8/arrow/memory"
+	"github.com/apache/arrow/go/v8/parquet/file"
+	"github.com/apache/arrow/go/v8/parquet/pqarrow"
+	"github.com/feast-dev/feast/go/cmd/server/logging"
+	"github.com/feast-dev/feast/go/internal/feast"
+	"github.com/feast-dev/feast/go/internal/test"
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	"github.com/stretchr/testify/assert"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/test/bufconn"
+)
+
+// Return the absolute path to the test feature_repo directory regardless of the working directory.
+func getRepoPath(basePath string) string {
+	// Get the file path of this source file, regardless of the working directory.
+	if basePath == "" {
+		_, filename, _, ok := runtime.Caller(0)
+		if !ok {
+			panic("couldn't find file path of the test file")
+		}
+		return filepath.Join(filename, "..", "..", "feature_repo")
+	} else {
+		return filepath.Join(basePath, "feature_repo")
+	}
+}
+
+// Starts a new gRPC server, registers the serving service and returns a client.
+func getClient(ctx context.Context, offlineStoreType string, basePath string, enableLogging bool) (serving.ServingServiceClient, func()) {
+	buffer := 1024 * 1024
+	listener := bufconn.Listen(buffer)
+
+	server := grpc.NewServer()
+	config, err := registry.NewRepoConfigFromFile(getRepoPath(basePath))
+	// Check the error before touching config below; a failed read would
+	// otherwise cause a nil dereference.
+	if err != nil {
+		panic(err)
+	}
+
+	// TODO(kevjumba): either add this officially or discuss in design review what
+	// the correct way to handle the path is.
+	// Currently in python we use the path in FileSource but it is not specified in configuration unless it is using file_url?
+	if enableLogging {
+		if config.OfflineStore == nil {
+			config.OfflineStore = map[string]interface{}{}
+		}
+		absPath, err := filepath.Abs(filepath.Join(getRepoPath(basePath), "log.parquet"))
+		if err != nil {
+			panic(err)
+		}
+		config.OfflineStore["path"] = absPath
+		config.OfflineStore["storeType"] = offlineStoreType
+	}
+
+	fs, err := feast.NewFeatureStore(config, nil)
+	if err != nil {
+		panic(err)
+	}
+	loggingService, err := logging.NewLoggingService(fs, 1000, "test_service", enableLogging)
+	if err != nil {
+		panic(err)
+	}
+	servingServiceServer := newServingServiceServer(fs, loggingService)
+
+	serving.RegisterServingServiceServer(server, servingServiceServer)
+	go func() {
+		if err := server.Serve(listener); err != nil {
+			panic(err)
+		}
+	}()
+
+	conn, _ := grpc.DialContext(ctx, "", grpc.WithContextDialer(func(context.Context, string) (net.Conn, error) {
+		return listener.Dial()
+	}), grpc.WithInsecure())
+
+	closer := func() {
+		listener.Close()
+		server.Stop()
+	}
+
+	client := serving.NewServingServiceClient(conn)
+
+	return client, closer
+}
+
+func TestGetFeastServingInfo(t *testing.T) {
+	ctx := context.Background()
+	// Test repo pregenerated using `feast init`.
+	dir := "logging/"
+	err := test.SetupInitializedRepo(dir)
+	assert.Nil(t, err)
+	defer test.CleanUpInitializedRepo(dir)
+
+	client, closer := getClient(ctx, "", dir, false)
+	defer closer()
+	response, err := client.GetFeastServingInfo(ctx, &serving.GetFeastServingInfoRequest{})
+	assert.Nil(t, err)
+	assert.Equal(t, feastServerVersion, response.Version)
+}
+
+func TestGetOnlineFeaturesSqlite(t *testing.T) {
+	ctx := context.Background()
+	// Test repo pregenerated using `feast init`.
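+	// This test goes through getClient, which serves over an in-process bufconn
+	// listener, so no network or running Feast deployment is needed.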
+	dir := "logging/"
+	err := test.SetupInitializedRepo(dir)
+	assert.Nil(t, err)
+	defer test.CleanUpInitializedRepo(dir)
+
+	client, closer := getClient(ctx, "", dir, false)
+	defer closer()
+	entities := make(map[string]*types.RepeatedValue)
+	entities["driver_id"] = &types.RepeatedValue{
+		Val: []*types.Value{
+			{Val: &types.Value_Int64Val{Int64Val: 1001}},
+			{Val: &types.Value_Int64Val{Int64Val: 1003}},
+			{Val: &types.Value_Int64Val{Int64Val: 1005}},
+		},
+	}
+	request := &serving.GetOnlineFeaturesRequest{
+		Kind: &serving.GetOnlineFeaturesRequest_Features{
+			Features: &serving.FeatureList{
+				Val: []string{"driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate", "driver_hourly_stats:avg_daily_trips"},
+			},
+		},
+		Entities: entities,
+	}
+	response, err := client.GetOnlineFeatures(ctx, request)
+	assert.Nil(t, err)
+	assert.NotNil(t, response)
+	expectedEntityValuesResp := []*types.Value{
+		{Val: &types.Value_Int64Val{Int64Val: 1001}},
+		{Val: &types.Value_Int64Val{Int64Val: 1003}},
+		{Val: &types.Value_Int64Val{Int64Val: 1005}},
+	}
+	expectedFeatureNamesResp := []string{"driver_id", "conv_rate", "acc_rate", "avg_daily_trips"}
+	rows, err := test.ReadParquet(filepath.Join(dir, "feature_repo", "driver_stats.parquet"))
+	assert.Nil(t, err)
+	entityKeys := map[int64]bool{1001: true, 1003: true, 1005: true}
+	correctFeatures := test.GetLatestFeatures(rows, entityKeys)
+	expectedConvRateValues := []*types.Value{}
+	expectedAccRateValues := []*types.Value{}
+	expectedAvgDailyTripsValues := []*types.Value{}
+
+	for _, key := range []int64{1001, 1003, 1005} {
+		expectedConvRateValues = append(expectedConvRateValues, &types.Value{Val: &types.Value_FloatVal{FloatVal: correctFeatures[key].ConvRate}})
+		expectedAccRateValues = append(expectedAccRateValues, &types.Value{Val: &types.Value_FloatVal{FloatVal: correctFeatures[key].AccRate}})
+		expectedAvgDailyTripsValues = append(expectedAvgDailyTripsValues, &types.Value{Val: &types.Value_Int64Val{Int64Val: int64(correctFeatures[key].AvgDailyTrips)}})
+	}
+	// The response is columnar: one feature vector per column, so we expect
+	// four columns here (one entity column plus three feature columns).
+	assert.Equal(t, len(response.Results), 4)
+
+	assert.True(t, reflect.DeepEqual(response.Results[0].Values, expectedEntityValuesResp))
+	assert.True(t, reflect.DeepEqual(response.Results[1].Values, expectedConvRateValues))
+	assert.True(t, reflect.DeepEqual(response.Results[2].Values, expectedAccRateValues))
+	assert.True(t, reflect.DeepEqual(response.Results[3].Values, expectedAvgDailyTripsValues))
+
+	assert.True(t, reflect.DeepEqual(response.Metadata.FeatureNames.Val, expectedFeatureNamesResp))
+}
+
+func TestGetOnlineFeaturesSqliteWithLogging(t *testing.T) {
+	ctx := context.Background()
+	// Test repo pregenerated using `feast init`.
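+	// Unlike the plain retrieval test above, this test requests a feature service
+	// with logging enabled and then asserts that the flushed parquet log matches
+	// the served values.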
+	dir := "logging/"
+	err := test.SetupInitializedRepo(dir)
+	assert.Nil(t, err)
+	defer test.CleanUpInitializedRepo(dir)
+
+	client, closer := getClient(ctx, "file", dir, true)
+	defer closer()
+	entities := make(map[string]*types.RepeatedValue)
+	entities["driver_id"] = &types.RepeatedValue{
+		Val: []*types.Value{
+			{Val: &types.Value_Int64Val{Int64Val: 1001}},
+			{Val: &types.Value_Int64Val{Int64Val: 1003}},
+			{Val: &types.Value_Int64Val{Int64Val: 1005}},
+		},
+	}
+
+	request := &serving.GetOnlineFeaturesRequest{
+		Kind: &serving.GetOnlineFeaturesRequest_FeatureService{
+			FeatureService: "test_service",
+		},
+		Entities:         entities,
+		FullFeatureNames: true,
+	}
+	response, err := client.GetOnlineFeatures(ctx, request)
+	assert.Nil(t, err)
+	assert.NotNil(t, response)
+
+	// Get the feature names without the entity names that are prepended to the response.
+	featureNames := response.Metadata.FeatureNames.Val[len(request.Entities):]
+	// Generate the expected log rows and values.
+	// TODO(kevjumba): implement for timestamp and status
+	expectedLogValues, _, _ := GetExpectedLogRows(featureNames, response.Results[len(request.Entities):])
+	expectedLogValues["driver_id"] = entities["driver_id"]
+	logPath, err := filepath.Abs(filepath.Join(dir, "feature_repo", "log.parquet"))
+	assert.Nil(t, err)
+	// Wait for the logger to flush.
+	assert.Eventually(t, func() bool {
+		_, err := os.Stat(logPath)
+		return !os.IsNotExist(err)
+	}, 1*time.Second, logging.DEFAULT_LOG_FLUSH_INTERVAL)
+	pf, err := file.OpenParquetFile(logPath, false)
+	assert.Nil(t, err)
+
+	reader, err := pqarrow.NewFileReader(pf, pqarrow.ArrowReadProperties{}, memory.DefaultAllocator)
+	assert.Nil(t, err)
+
+	tbl, err := reader.ReadTable(ctx)
+	assert.Nil(t, err)
+	tr := array.NewTableReader(tbl, -1)
+	defer tbl.Release()
+	defer tr.Release()
+	for tr.Next() {
+		rec := tr.Record()
+		assert.NotNil(t, rec)
+		values, err := test.GetProtoFromRecord(rec)
+		assert.Nil(t, err)
+		assert.Equal(t, len(values)-1 /*request id column not counted*/, len(expectedLogValues))
+		// Compare element by element; equality on whole types.RepeatedValue
+		// messages is not reliable here.
+		for name, val := range values {
+			if name == "RequestId" {
+				// Ensure there are request ids for each entity.
+				assert.Equal(t, len(val.Val), len(response.Results[0].Values))
+			} else {
+				assert.Equal(t, len(val.Val), len(expectedLogValues[name].Val))
+				for idx, featureVal := range val.Val {
+					assert.Equal(t, featureVal.Val, expectedLogValues[name].Val[idx].Val)
+				}
+			}
+		}
+	}
+	err = test.CleanUpFile(logPath)
+	assert.Nil(t, err)
+}
+
+// Generate the expected log rows based on the feature vectors returned by GetOnlineFeatures.
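+// The returned values map is keyed by full feature name; the status and event
+// timestamp matrices are indexed as [row][feature], with timestamps expressed
+// in milliseconds since the epoch.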
+func GetExpectedLogRows(featureNames []string, results []*serving.GetOnlineFeaturesResponse_FeatureVector) (map[string]*types.RepeatedValue, [][]int32, [][]int64) { + numFeatures := len(featureNames) + numRows := len(results[0].Values) + featureValueLogRows := make(map[string]*types.RepeatedValue) + featureStatusLogRows := make([][]int32, numRows) + eventTimestampLogRows := make([][]int64, numRows) + for idx := 0; idx < len(results); idx++ { + valArray := make([]*types.Value, 0) + for row_idx := 0; row_idx < numRows; row_idx++ { + featureStatusLogRows[row_idx] = make([]int32, numFeatures) + eventTimestampLogRows[row_idx] = make([]int64, numFeatures) + valArray = append(valArray, results[idx].Values[row_idx]) + featureStatusLogRows[row_idx][idx] = int32(serving.FieldStatus_PRESENT) + eventTimestampLogRows[row_idx][idx] = results[idx].EventTimestamps[row_idx].AsTime().UnixNano() / int64(time.Millisecond) + + } + featureValueLogRows[featureNames[idx]] = &types.RepeatedValue{ + Val: valArray, + } + } + return featureValueLogRows, featureStatusLogRows, eventTimestampLogRows +} diff --git a/go/embedded/online_features.go b/go/embedded/online_features.go new file mode 100644 index 0000000000..24a5489430 --- /dev/null +++ b/go/embedded/online_features.go @@ -0,0 +1,227 @@ +package embedded + +import ( + "context" + "fmt" + "log" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/cdata" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/internal/feast" + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/internal/feast/onlineserving" + "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/feast-dev/feast/go/internal/feast/transformation" + prototypes "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/feast-dev/feast/go/types" +) + +type OnlineFeatureService struct { + fs *feast.FeatureStore +} + +type OnlineFeatureServiceConfig struct { + RepoPath string + RepoConfig string +} + +type DataTable struct { + DataPtr uintptr + SchemaPtr uintptr +} + +func NewOnlineFeatureService(conf *OnlineFeatureServiceConfig, transformationCallback transformation.TransformationCallback) *OnlineFeatureService { + repoConfig, err := registry.NewRepoConfigFromJSON(conf.RepoPath, conf.RepoConfig) + if err != nil { + log.Fatalln(err) + } + + fs, err := feast.NewFeatureStore(repoConfig, transformationCallback) + if err != nil { + log.Fatalln(err) + } + + return &OnlineFeatureService{fs: fs} +} + +func (s *OnlineFeatureService) GetEntityTypesMap(featureRefs []string) (map[string]int32, error) { + viewNames := make(map[string]interface{}) + for _, featureRef := range featureRefs { + viewName, _, err := onlineserving.ParseFeatureReference(featureRef) + if err != nil { + return nil, err + } + viewNames[viewName] = nil + } + + entities, _ := s.fs.ListEntities(true) + entitiesByName := make(map[string]*model.Entity) + for _, entity := range entities { + entitiesByName[entity.Name] = entity + } + + joinKeyTypes := make(map[string]int32) + + for viewName := range viewNames { + view, err := s.fs.GetFeatureView(viewName, true) + if err != nil { + // skip on demand feature views + continue + } + for _, entityName := range view.Entities { + entity := entitiesByName[entityName] + joinKeyTypes[entity.JoinKey] = int32(entity.ValueType.Number()) + } + } + + return joinKeyTypes, nil +} + +func (s *OnlineFeatureService) GetEntityTypesMapByFeatureService(featureServiceName 
string) (map[string]int32, error) { + featureService, err := s.fs.GetFeatureService(featureServiceName) + if err != nil { + return nil, err + } + + joinKeyTypes := make(map[string]int32) + + entities, _ := s.fs.ListEntities(true) + entitiesByName := make(map[string]*model.Entity) + for _, entity := range entities { + entitiesByName[entity.Name] = entity + } + + for _, projection := range featureService.Projections { + view, err := s.fs.GetFeatureView(projection.Name, true) + if err != nil { + // skip on demand feature views + continue + } + for _, entityName := range view.Entities { + entity := entitiesByName[entityName] + joinKeyTypes[entity.JoinKey] = int32(entity.ValueType.Number()) + } + } + + return joinKeyTypes, nil +} + +func (s *OnlineFeatureService) GetOnlineFeatures( + featureRefs []string, + featureServiceName string, + entities DataTable, + requestData DataTable, + fullFeatureNames bool, + output DataTable) error { + + entitiesRecord, err := readArrowRecord(entities) + if err != nil { + return err + } + + numRows := entitiesRecord.Column(0).Len() + + entitiesProto, err := recordToProto(entitiesRecord) + if err != nil { + return err + } + + requestDataRecords, err := readArrowRecord(requestData) + if err != nil { + return err + } + + requestDataProto, err := recordToProto(requestDataRecords) + if err != nil { + return err + } + + var featureService *model.FeatureService + if featureServiceName != "" { + featureService, err = s.fs.GetFeatureService(featureServiceName) + } + + resp, err := s.fs.GetOnlineFeatures( + context.Background(), + featureRefs, + featureService, + entitiesProto, + requestDataProto, + fullFeatureNames) + + if err != nil { + return err + } + + outputFields := make([]arrow.Field, 0) + outputColumns := make([]arrow.Array, 0) + pool := memory.NewGoAllocator() + for _, featureVector := range resp { + outputFields = append(outputFields, + arrow.Field{ + Name: featureVector.Name, + Type: featureVector.Values.DataType()}) + outputFields = append(outputFields, + arrow.Field{ + Name: fmt.Sprintf("%s__status", featureVector.Name), + Type: arrow.PrimitiveTypes.Int32}) + outputFields = append(outputFields, + arrow.Field{ + Name: fmt.Sprintf("%s__timestamp", featureVector.Name), + Type: arrow.PrimitiveTypes.Int64}) + + outputColumns = append(outputColumns, featureVector.Values) + + statusColumnBuilder := array.NewInt32Builder(pool) + for _, status := range featureVector.Statuses { + statusColumnBuilder.Append(int32(status)) + } + statusColumn := statusColumnBuilder.NewArray() + outputColumns = append(outputColumns, statusColumn) + + tsColumnBuilder := array.NewInt64Builder(pool) + for _, ts := range featureVector.Timestamps { + tsColumnBuilder.Append(ts.GetSeconds()) + } + tsColumn := tsColumnBuilder.NewArray() + outputColumns = append(outputColumns, tsColumn) + } + + result := array.NewRecord(arrow.NewSchema(outputFields, nil), outputColumns, int64(numRows)) + + cdata.ExportArrowRecordBatch(result, + cdata.ArrayFromPtr(output.DataPtr), + cdata.SchemaFromPtr(output.SchemaPtr)) + + return nil +} + +/* + Read Record Batch from memory managed by Python caller. + Python part uses C ABI interface to export this record into C Data Interface, + and then it provides pointers (dataPtr & schemaPtr) to the Go part. + Here we import this data from given pointers and wrap the underlying values + into Go Arrow Interface (array.Record). 
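+	The C Data Interface makes this a zero-copy handoff: only buffer pointers and
+	schema metadata cross the language boundary, not the feature data itself.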
+ See export code here https://github.com/feast-dev/feast/blob/master/sdk/python/feast/embedded_go/online_features_service.py +*/ +func readArrowRecord(data DataTable) (array.Record, error) { + return cdata.ImportCRecordBatch( + cdata.ArrayFromPtr(data.DataPtr), + cdata.SchemaFromPtr(data.SchemaPtr)) +} + +func recordToProto(rec array.Record) (map[string]*prototypes.RepeatedValue, error) { + r := make(map[string]*prototypes.RepeatedValue) + schema := rec.Schema() + for idx, column := range rec.Columns() { + field := schema.Field(idx) + values, err := types.ArrowValuesToProtoValues(column) + if err != nil { + return nil, err + } + r[field.Name] = &prototypes.RepeatedValue{Val: values} + } + return r, nil +} diff --git a/go/internal/feast/featurestore.go b/go/internal/feast/featurestore.go new file mode 100644 index 0000000000..5e10f4978e --- /dev/null +++ b/go/internal/feast/featurestore.go @@ -0,0 +1,289 @@ +package feast + +import ( + "context" + "errors" + + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/internal/feast/onlineserving" + "github.com/feast-dev/feast/go/internal/feast/onlinestore" + "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/feast-dev/feast/go/internal/feast/transformation" + "github.com/feast-dev/feast/go/protos/feast/serving" + prototypes "github.com/feast-dev/feast/go/protos/feast/types" +) + +type FeatureStore struct { + config *registry.RepoConfig + registry *registry.Registry + onlineStore onlinestore.OnlineStore + transformationCallback transformation.TransformationCallback +} + +// A Features struct specifies a list of features to be retrieved from the online store. These features +// can be specified either as a list of string feature references or as a feature service. String +// feature references must have format "feature_view:feature", e.g. "customer_fv:daily_transactions". +type Features struct { + FeaturesRefs []string + FeatureService *model.FeatureService +} + +func (fs *FeatureStore) Registry() *registry.Registry { + return fs.registry +} + +func (fs *FeatureStore) GetRepoConfig() *registry.RepoConfig { + return fs.config +} + +// NewFeatureStore constructs a feature store fat client using the +// repo config (contents of feature_store.yaml converted to JSON map). 
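+// A minimal construction sketch (the option values below are illustrative; see
+// the tests for concrete configurations):
+//
+//	cfg := &registry.RepoConfig{
+//		Project:  "feature_repo",
+//		Provider: "local",
+//		OnlineStore: map[string]interface{}{
+//			"type":              "redis",
+//			"connection_string": "localhost:6379",
+//		},
+//	}
+//	fs, err := NewFeatureStore(cfg, nil)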
+func NewFeatureStore(config *registry.RepoConfig, callback transformation.TransformationCallback) (*FeatureStore, error) { + onlineStore, err := onlinestore.NewOnlineStore(config) + if err != nil { + return nil, err + } + + registry, err := registry.NewRegistry(config.GetRegistryConfig(), config.RepoPath) + if err != nil { + return nil, err + } + registry.InitializeRegistry() + + return &FeatureStore{ + config: config, + registry: registry, + onlineStore: onlineStore, + transformationCallback: callback, + }, nil +} + +// TODO: Review all functions that use ODFV and Request FV since these have not been tested +// ToDo: Split GetOnlineFeatures interface into two: GetOnlinFeaturesByFeatureService and GetOnlineFeaturesByFeatureRefs +func (fs *FeatureStore) GetOnlineFeatures( + ctx context.Context, + featureRefs []string, + featureService *model.FeatureService, + joinKeyToEntityValues map[string]*prototypes.RepeatedValue, + requestData map[string]*prototypes.RepeatedValue, + fullFeatureNames bool) ([]*onlineserving.FeatureVector, error) { + fvs, odFvs, err := fs.listAllViews() + if err != nil { + return nil, err + } + + entities, err := fs.ListEntities(false) + if err != nil { + return nil, err + } + + var requestedFeatureViews []*onlineserving.FeatureViewAndRefs + var requestedOnDemandFeatureViews []*model.OnDemandFeatureView + if featureService != nil { + requestedFeatureViews, requestedOnDemandFeatureViews, err = + onlineserving.GetFeatureViewsToUseByService(featureService, fvs, odFvs) + } else { + requestedFeatureViews, requestedOnDemandFeatureViews, err = + onlineserving.GetFeatureViewsToUseByFeatureRefs(featureRefs, fvs, odFvs) + } + if err != nil { + return nil, err + } + + entityNameToJoinKeyMap, expectedJoinKeysSet, err := onlineserving.GetEntityMaps(requestedFeatureViews, entities) + if err != nil { + return nil, err + } + + err = onlineserving.ValidateFeatureRefs(requestedFeatureViews, fullFeatureNames) + if err != nil { + return nil, err + } + + numRows, err := onlineserving.ValidateEntityValues(joinKeyToEntityValues, requestData, expectedJoinKeysSet) + if err != nil { + return nil, err + } + + err = transformation.EnsureRequestedDataExist(requestedOnDemandFeatureViews, requestData) + if err != nil { + return nil, err + } + + result := make([]*onlineserving.FeatureVector, 0) + arrowMemory := memory.NewGoAllocator() + featureViews := make([]*model.FeatureView, len(requestedFeatureViews)) + index := 0 + for _, featuresAndView := range requestedFeatureViews { + featureViews[index] = featuresAndView.View + index += 1 + } + + entitylessCase := false + for _, featureView := range featureViews { + if featureView.HasEntity(model.DUMMY_ENTITY_NAME) { + entitylessCase = true + break + } + } + + if entitylessCase { + dummyEntityColumn := &prototypes.RepeatedValue{Val: make([]*prototypes.Value, numRows)} + for index := 0; index < numRows; index++ { + dummyEntityColumn.Val[index] = &model.DUMMY_ENTITY + } + joinKeyToEntityValues[model.DUMMY_ENTITY_ID] = dummyEntityColumn + } + + groupedRefs, err := onlineserving.GroupFeatureRefs(requestedFeatureViews, joinKeyToEntityValues, entityNameToJoinKeyMap, fullFeatureNames) + if err != nil { + return nil, err + } + + for _, groupRef := range groupedRefs { + featureData, err := fs.readFromOnlineStore(ctx, groupRef.EntityKeys, groupRef.FeatureViewNames, groupRef.FeatureNames) + if err != nil { + return nil, err + } + + vectors, err := onlineserving.TransposeFeatureRowsIntoColumns( + featureData, + groupRef, + requestedFeatureViews, + arrowMemory, + 
numRows,
+		)
+		if err != nil {
+			return nil, err
+		}
+		result = append(result, vectors...)
+	}
+
+	if fs.transformationCallback != nil {
+		onDemandFeatures, err := transformation.AugmentResponseWithOnDemandTransforms(
+			requestedOnDemandFeatureViews,
+			requestData,
+			joinKeyToEntityValues,
+			result,
+			fs.transformationCallback,
+			arrowMemory,
+			numRows,
+			fullFeatureNames,
+		)
+		if err != nil {
+			return nil, err
+		}
+		result = append(result, onDemandFeatures...)
+	}
+
+	result, err = onlineserving.KeepOnlyRequestedFeatures(result, featureRefs, featureService, fullFeatureNames)
+	if err != nil {
+		return nil, err
+	}
+
+	entityColumns, err := onlineserving.EntitiesToFeatureVectors(joinKeyToEntityValues, arrowMemory, numRows)
+	if err != nil {
+		return nil, err
+	}
+	result = append(entityColumns, result...)
+	return result, nil
+}
+
+func (fs *FeatureStore) DestructOnlineStore() {
+	fs.onlineStore.Destruct()
+}
+
+// ParseFeatures parses the kind field of a GetOnlineFeaturesRequest protobuf message
+// and populates a Features struct with the result.
+func (fs *FeatureStore) ParseFeatures(kind interface{}) (*Features, error) {
+	if featureList, ok := kind.(*serving.GetOnlineFeaturesRequest_Features); ok {
+		return &Features{FeaturesRefs: featureList.Features.GetVal(), FeatureService: nil}, nil
+	}
+	if featureServiceRequest, ok := kind.(*serving.GetOnlineFeaturesRequest_FeatureService); ok {
+		featureService, err := fs.registry.GetFeatureService(fs.config.Project, featureServiceRequest.FeatureService)
+		if err != nil {
+			return nil, err
+		}
+		return &Features{FeaturesRefs: nil, FeatureService: featureService}, nil
+	}
+	return nil, errors.New("cannot parse kind from GetOnlineFeaturesRequest")
+}
+
+func (fs *FeatureStore) GetFeatureService(name string) (*model.FeatureService, error) {
+	return fs.registry.GetFeatureService(fs.config.Project, name)
+}
+
+func (fs *FeatureStore) listAllViews() (map[string]*model.FeatureView, map[string]*model.OnDemandFeatureView, error) {
+	fvs := make(map[string]*model.FeatureView)
+	odFvs := make(map[string]*model.OnDemandFeatureView)
+
+	featureViews, err := fs.ListFeatureViews()
+	if err != nil {
+		return nil, nil, err
+	}
+	for _, featureView := range featureViews {
+		fvs[featureView.Base.Name] = featureView
+	}
+
+	onDemandFeatureViews, err := fs.registry.ListOnDemandFeatureViews(fs.config.Project)
+	if err != nil {
+		return nil, nil, err
+	}
+	for _, onDemandFeatureView := range onDemandFeatureViews {
+		odFvs[onDemandFeatureView.Base.Name] = onDemandFeatureView
+	}
+	return fvs, odFvs, nil
+}
+
+func (fs *FeatureStore) ListFeatureViews() ([]*model.FeatureView, error) {
+	featureViews, err := fs.registry.ListFeatureViews(fs.config.Project)
+	if err != nil {
+		return featureViews, err
+	}
+	return featureViews, nil
+}
+
+func (fs *FeatureStore) ListEntities(hideDummyEntity bool) ([]*model.Entity, error) {
+	allEntities, err := fs.registry.ListEntities(fs.config.Project)
+	if err != nil {
+		return allEntities, err
+	}
+	entities := make([]*model.Entity, 0)
+	for _, entity := range allEntities {
+		if entity.Name != model.DUMMY_ENTITY_NAME || !hideDummyEntity {
+			entities = append(entities, entity)
+		}
+	}
+	return entities, nil
+}
+
+func (fs *FeatureStore) ListOnDemandFeatureViews() ([]*model.OnDemandFeatureView, error) {
+	return fs.registry.ListOnDemandFeatureViews(fs.config.Project)
+}
+
+/*
+Group feature views that share the same set of join keys.
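+(This grouping is implemented by onlineserving.GroupFeatureRefs, which
+GetOnlineFeatures calls above.)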
For each group, we store only unique rows and save indices to retrieve those +rows for each requested feature +*/ + +func (fs *FeatureStore) GetFeatureView(featureViewName string, hideDummyEntity bool) (*model.FeatureView, error) { + fv, err := fs.registry.GetFeatureView(fs.config.Project, featureViewName) + if err != nil { + return nil, err + } + if fv.HasEntity(model.DUMMY_ENTITY_NAME) && hideDummyEntity { + fv.Entities = []string{} + } + return fv, nil +} + +func (fs *FeatureStore) readFromOnlineStore(ctx context.Context, entityRows []*prototypes.EntityKey, + requestedFeatureViewNames []string, + requestedFeatureNames []string, +) ([][]onlinestore.FeatureData, error) { + numRows := len(entityRows) + entityRowsValue := make([]*prototypes.EntityKey, numRows) + for index, entityKey := range entityRows { + entityRowsValue[index] = &prototypes.EntityKey{JoinKeys: entityKey.JoinKeys, EntityValues: entityKey.EntityValues} + } + return fs.onlineStore.OnlineRead(ctx, entityRowsValue, requestedFeatureViewNames, requestedFeatureNames) +} diff --git a/go/internal/feast/featurestore_test.go b/go/internal/feast/featurestore_test.go new file mode 100644 index 0000000000..c8f9049c4a --- /dev/null +++ b/go/internal/feast/featurestore_test.go @@ -0,0 +1,71 @@ +package feast + +import ( + "context" + "path/filepath" + "runtime" + "testing" + + "github.com/feast-dev/feast/go/internal/feast/onlinestore" + "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/stretchr/testify/assert" +) + +// Return absolute path to the test_repo registry regardless of the working directory +func getRegistryPath() map[string]interface{} { + // Get the file path of this source file, regardless of the working directory + _, filename, _, ok := runtime.Caller(0) + if !ok { + panic("couldn't find file path of the test file") + } + registry := map[string]interface{}{ + "path": filepath.Join(filename, "..", "..", "..", "feature_repo/data/registry.db"), + } + return registry +} + +func TestNewFeatureStore(t *testing.T) { + t.Skip("@todo(achals): feature_repo isn't checked in yet") + config := registry.RepoConfig{ + Project: "feature_repo", + Registry: getRegistryPath(), + Provider: "local", + OnlineStore: map[string]interface{}{ + "type": "redis", + }, + } + fs, err := NewFeatureStore(&config, nil) + assert.Nil(t, err) + assert.IsType(t, &onlinestore.RedisOnlineStore{}, fs.onlineStore) +} + +func TestGetOnlineFeaturesRedis(t *testing.T) { + t.Skip("@todo(achals): feature_repo isn't checked in yet") + config := registry.RepoConfig{ + Project: "feature_repo", + Registry: getRegistryPath(), + Provider: "local", + OnlineStore: map[string]interface{}{ + "type": "redis", + "connection_string": "localhost:6379", + }, + } + + featureNames := []string{"driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + } + entities := map[string]*types.RepeatedValue{"driver_id": {Val: []*types.Value{{Val: &types.Value_Int64Val{Int64Val: 1001}}, + {Val: &types.Value_Int64Val{Int64Val: 1002}}, + {Val: &types.Value_Int64Val{Int64Val: 1003}}}}, + } + + fs, err := NewFeatureStore(&config, nil) + assert.Nil(t, err) + ctx := context.Background() + response, err := fs.GetOnlineFeatures( + ctx, featureNames, nil, entities, map[string]*types.RepeatedValue{}, true) + assert.Nil(t, err) + assert.Len(t, response, 4) // 3 Features + 1 entity = 4 columns (feature vectors) in response +} diff --git a/go/internal/feast/model/basefeatureview.go 
b/go/internal/feast/model/basefeatureview.go new file mode 100644 index 0000000000..28ef7231fd --- /dev/null +++ b/go/internal/feast/model/basefeatureview.go @@ -0,0 +1,59 @@ +package model + +import ( + "fmt" + + "github.com/feast-dev/feast/go/protos/feast/core" +) + +type BaseFeatureView struct { + Name string + Features []*Feature + Projection *FeatureViewProjection +} + +func NewBaseFeatureView(name string, featureProtos []*core.FeatureSpecV2) *BaseFeatureView { + base := &BaseFeatureView{Name: name} + features := make([]*Feature, len(featureProtos)) + for index, featureSpecV2 := range featureProtos { + features[index] = NewFeatureFromProto(featureSpecV2) + } + base.Features = features + base.Projection = NewFeatureViewProjectionFromDefinition(base) + return base +} + +func (fv *BaseFeatureView) WithProjection(projection *FeatureViewProjection) (*BaseFeatureView, error) { + if projection.Name != fv.Name { + return nil, fmt.Errorf("the projection for the %s FeatureView cannot be applied because it differs "+ + "in Name; the projection is named %s and the Name indicates which "+ + "FeatureView the projection is for", fv.Name, projection.Name) + } + features := make(map[string]bool) + for _, feature := range fv.Features { + features[feature.Name] = true + } + for _, feature := range projection.Features { + if _, ok := features[feature.Name]; !ok { + return nil, fmt.Errorf("the projection for %s cannot be applied because it contains %s which the "+ + "FeatureView doesn't have", projection.Name, feature.Name) + } + } + return &BaseFeatureView{Name: fv.Name, Features: fv.Features, Projection: projection}, nil +} + +func (fv *BaseFeatureView) ProjectWithFeatures(featureNames []string) *FeatureViewProjection { + features := make([]*Feature, 0) + for _, feature := range fv.Features { + for _, allowedFeatureName := range featureNames { + if feature.Name == allowedFeatureName { + features = append(features, feature) + } + } + } + + return &FeatureViewProjection{ + Name: fv.Name, + Features: features, + } +} diff --git a/go/internal/feast/model/entity.go b/go/internal/feast/model/entity.go new file mode 100644 index 0000000000..ac3a5d5f26 --- /dev/null +++ b/go/internal/feast/model/entity.go @@ -0,0 +1,19 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +type Entity struct { + Name string + ValueType types.ValueType_Enum + JoinKey string +} + +func NewEntityFromProto(proto *core.Entity) *Entity { + return &Entity{Name: proto.Spec.Name, + ValueType: proto.Spec.ValueType, + JoinKey: proto.Spec.JoinKey, + } +} diff --git a/go/internal/feast/model/feature.go b/go/internal/feast/model/feature.go new file mode 100644 index 0000000000..d833a8901b --- /dev/null +++ b/go/internal/feast/model/feature.go @@ -0,0 +1,17 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +type Feature struct { + Name string + Dtype types.ValueType_Enum +} + +func NewFeatureFromProto(proto *core.FeatureSpecV2) *Feature { + return &Feature{Name: proto.Name, + Dtype: proto.ValueType, + } +} diff --git a/go/internal/feast/model/featureservice.go b/go/internal/feast/model/featureservice.go new file mode 100644 index 0000000000..5619dd9042 --- /dev/null +++ b/go/internal/feast/model/featureservice.go @@ -0,0 +1,27 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + timestamppb 
"google.golang.org/protobuf/types/known/timestamppb" +) + +type FeatureService struct { + Name string + Project string + CreatedTimestamp *timestamppb.Timestamp + LastUpdatedTimestamp *timestamppb.Timestamp + Projections []*FeatureViewProjection +} + +func NewFeatureServiceFromProto(proto *core.FeatureService) *FeatureService { + projections := make([]*FeatureViewProjection, len(proto.Spec.Features)) + for index, projectionProto := range proto.Spec.Features { + projections[index] = NewFeatureViewProjectionFromProto(projectionProto) + } + return &FeatureService{Name: proto.Spec.Name, + Project: proto.Spec.Project, + CreatedTimestamp: proto.Meta.CreatedTimestamp, + LastUpdatedTimestamp: proto.Meta.LastUpdatedTimestamp, + Projections: projections, + } +} diff --git a/go/internal/feast/model/featureview.go b/go/internal/feast/model/featureview.go new file mode 100644 index 0000000000..85fc7a60ee --- /dev/null +++ b/go/internal/feast/model/featureview.go @@ -0,0 +1,51 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" + durationpb "google.golang.org/protobuf/types/known/durationpb" +) + +const ( + DUMMY_ENTITY_ID = "__dummy_id" + DUMMY_ENTITY_NAME = "__dummy" + DUMMY_ENTITY_VAL = "" +) + +var DUMMY_ENTITY types.Value = types.Value{Val: &types.Value_StringVal{StringVal: DUMMY_ENTITY_VAL}} + +type FeatureView struct { + Base *BaseFeatureView + Ttl *durationpb.Duration + Entities []string +} + +func NewFeatureViewFromProto(proto *core.FeatureView) *FeatureView { + featureView := &FeatureView{Base: NewBaseFeatureView(proto.Spec.Name, proto.Spec.Features), + Ttl: &(*proto.Spec.Ttl), + } + if len(proto.Spec.Entities) == 0 { + featureView.Entities = []string{DUMMY_ENTITY_NAME} + } else { + featureView.Entities = proto.Spec.Entities + } + return featureView +} + +func (fs *FeatureView) NewFeatureViewFromBase(base *BaseFeatureView) *FeatureView { + ttl := durationpb.Duration{Seconds: fs.Ttl.Seconds, Nanos: fs.Ttl.Nanos} + featureView := &FeatureView{Base: base, + Ttl: &ttl, + Entities: fs.Entities, + } + return featureView +} + +func (fs *FeatureView) HasEntity(lookup string) bool { + for _, entityName := range fs.Entities { + if entityName == lookup { + return true + } + } + return false +} diff --git a/go/internal/feast/model/featureviewprojection.go b/go/internal/feast/model/featureviewprojection.go new file mode 100644 index 0000000000..e80e8844ed --- /dev/null +++ b/go/internal/feast/model/featureviewprojection.go @@ -0,0 +1,41 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" +) + +type FeatureViewProjection struct { + Name string + NameAlias string + Features []*Feature + JoinKeyMap map[string]string +} + +func (fv *FeatureViewProjection) NameToUse() string { + if len(fv.NameAlias) == 0 { + return fv.Name + } + return fv.NameAlias +} + +func NewFeatureViewProjectionFromProto(proto *core.FeatureViewProjection) *FeatureViewProjection { + featureProjection := &FeatureViewProjection{Name: proto.FeatureViewName, + NameAlias: proto.FeatureViewNameAlias, + JoinKeyMap: proto.JoinKeyMap, + } + + features := make([]*Feature, len(proto.FeatureColumns)) + for index, featureSpecV2 := range proto.FeatureColumns { + features[index] = NewFeatureFromProto(featureSpecV2) + } + featureProjection.Features = features + return featureProjection +} + +func NewFeatureViewProjectionFromDefinition(base *BaseFeatureView) *FeatureViewProjection { + return &FeatureViewProjection{Name: base.Name, + NameAlias: 
"", + Features: base.Features, + JoinKeyMap: make(map[string]string), + } +} diff --git a/go/internal/feast/model/ondemandfeatureview.go b/go/internal/feast/model/ondemandfeatureview.go new file mode 100644 index 0000000000..b7a352cc20 --- /dev/null +++ b/go/internal/feast/model/ondemandfeatureview.go @@ -0,0 +1,69 @@ +package model + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +type OnDemandFeatureView struct { + Base *BaseFeatureView + SourceFeatureViewProjections map[string]*FeatureViewProjection + SourceRequestDataSources map[string]*core.DataSource_RequestDataOptions +} + +func NewOnDemandFeatureViewFromProto(proto *core.OnDemandFeatureView) *OnDemandFeatureView { + onDemandFeatureView := &OnDemandFeatureView{Base: NewBaseFeatureView(proto.Spec.Name, proto.Spec.Features), + SourceFeatureViewProjections: make(map[string]*FeatureViewProjection), + SourceRequestDataSources: make(map[string]*core.DataSource_RequestDataOptions), + } + for sourceName, onDemandSource := range proto.Spec.Sources { + if onDemandSourceFeatureView, ok := onDemandSource.Source.(*core.OnDemandSource_FeatureView); ok { + featureViewProto := onDemandSourceFeatureView.FeatureView + featureView := NewFeatureViewFromProto(featureViewProto) + onDemandFeatureView.SourceFeatureViewProjections[sourceName] = featureView.Base.Projection + } else if onDemandSourceFeatureViewProjection, ok := onDemandSource.Source.(*core.OnDemandSource_FeatureViewProjection); ok { + featureProjectionProto := onDemandSourceFeatureViewProjection.FeatureViewProjection + onDemandFeatureView.SourceFeatureViewProjections[sourceName] = NewFeatureViewProjectionFromProto(featureProjectionProto) + } else if onDemandSourceRequestFeatureView, ok := onDemandSource.Source.(*core.OnDemandSource_RequestDataSource); ok { + + if dataSourceRequestOptions, ok := onDemandSourceRequestFeatureView.RequestDataSource.Options.(*core.DataSource_RequestDataOptions_); ok { + onDemandFeatureView.SourceRequestDataSources[sourceName] = dataSourceRequestOptions.RequestDataOptions + } + } + } + + return onDemandFeatureView +} + +func (fs *OnDemandFeatureView) NewWithProjection(projection *FeatureViewProjection) (*OnDemandFeatureView, error) { + projectedBase, err := fs.Base.WithProjection(projection) + if err != nil { + return nil, err + } + featureView := &OnDemandFeatureView{ + Base: projectedBase, + SourceFeatureViewProjections: fs.SourceFeatureViewProjections, + SourceRequestDataSources: fs.SourceRequestDataSources, + } + return featureView, nil +} + +func NewOnDemandFeatureViewFromBase(base *BaseFeatureView) *OnDemandFeatureView { + + featureView := &OnDemandFeatureView{Base: base} + return featureView +} + +func (fs *OnDemandFeatureView) ProjectWithFeatures(featureNames []string) (*OnDemandFeatureView, error) { + return fs.NewWithProjection(fs.Base.ProjectWithFeatures(featureNames)) +} + +func (fs *OnDemandFeatureView) GetRequestDataSchema() map[string]types.ValueType_Enum { + schema := make(map[string]types.ValueType_Enum) + for _, requestDataSource := range fs.SourceRequestDataSources { + for _, featureSpec := range requestDataSource.Schema { + schema[featureSpec.Name] = featureSpec.ValueType + } + } + return schema +} diff --git a/go/internal/feast/onlineserving/serving.go b/go/internal/feast/onlineserving/serving.go new file mode 100644 index 0000000000..381ba5f0f2 --- /dev/null +++ b/go/internal/feast/onlineserving/serving.go @@ -0,0 +1,640 @@ +package onlineserving + +import ( + 
"crypto/sha256" + "errors" + "fmt" + "sort" + "strings" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/internal/feast/onlinestore" + "github.com/feast-dev/feast/go/protos/feast/serving" + prototypes "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/feast-dev/feast/go/types" + "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/types/known/durationpb" + "google.golang.org/protobuf/types/known/timestamppb" +) + +/* + FeatureVector type represent result of retrieving single feature for multiple rows. + It can be imagined as a column in output dataframe / table. + It contains of feature name, list of values (across all rows), + list of statuses and list of timestamp. All these lists have equal length. + And this length is also equal to number of entity rows received in request. +*/ +type FeatureVector struct { + Name string + Values arrow.Array + Statuses []serving.FieldStatus + Timestamps []*timestamppb.Timestamp +} + +type FeatureViewAndRefs struct { + View *model.FeatureView + FeatureRefs []string +} + +/* + We group all features from a single request by entities they attached to. + Thus, we will be able to call online retrieval per entity and not per each feature View. + In this struct we collect all features and views that belongs to a group. + We also store here projected entity keys (only ones that needed to retrieve these features) + and indexes to map result of retrieval into output response. +*/ +type GroupedFeaturesPerEntitySet struct { + // A list of requested feature references of the form featureViewName:featureName that share this entity set + FeatureNames []string + FeatureViewNames []string + // full feature references as they supposed to appear in response + AliasedFeatureNames []string + // Entity set as a list of EntityKeys to pass to OnlineRead + EntityKeys []*prototypes.EntityKey + // Reversed mapping to project result of retrieval from storage to response + Indices [][]int +} + +/* + Return + (1) requested feature views and features grouped per View + (2) requested on demand feature views + existed in the registry + +*/ +func GetFeatureViewsToUseByService( + featureService *model.FeatureService, + featureViews map[string]*model.FeatureView, + onDemandFeatureViews map[string]*model.OnDemandFeatureView) ([]*FeatureViewAndRefs, []*model.OnDemandFeatureView, error) { + + viewNameToViewAndRefs := make(map[string]*FeatureViewAndRefs) + odFvsToUse := make([]*model.OnDemandFeatureView, 0) + + for _, featureProjection := range featureService.Projections { + // Create copies of FeatureView that may contains the same *FeatureView but + // each differentiated by a *FeatureViewProjection + featureViewName := featureProjection.Name + if fv, ok := featureViews[featureViewName]; ok { + base, err := fv.Base.WithProjection(featureProjection) + if err != nil { + return nil, nil, err + } + if _, ok := viewNameToViewAndRefs[featureProjection.NameToUse()]; !ok { + viewNameToViewAndRefs[featureProjection.NameToUse()] = &FeatureViewAndRefs{ + View: fv.NewFeatureViewFromBase(base), + FeatureRefs: []string{}, + } + } + + for _, feature := range featureProjection.Features { + viewNameToViewAndRefs[featureProjection.NameToUse()].FeatureRefs = + addStringIfNotContains(viewNameToViewAndRefs[featureProjection.NameToUse()].FeatureRefs, + feature.Name) + } + + } else if odFv, ok := onDemandFeatureViews[featureViewName]; ok { + projectedOdFv, err := 
odFv.NewWithProjection(featureProjection) + if err != nil { + return nil, nil, err + } + odFvsToUse = append(odFvsToUse, projectedOdFv) + err = extractOdFvDependencies( + projectedOdFv, + featureViews, + viewNameToViewAndRefs) + if err != nil { + return nil, nil, err + } + } else { + return nil, nil, fmt.Errorf("the provided feature service %s contains a reference to a feature View"+ + "%s which doesn't exist, please make sure that you have created the feature View"+ + "%s and that you have registered it by running \"apply\"", featureService.Name, featureViewName, featureViewName) + } + } + + fvsToUse := make([]*FeatureViewAndRefs, 0) + for _, viewAndRef := range viewNameToViewAndRefs { + fvsToUse = append(fvsToUse, viewAndRef) + } + + return fvsToUse, odFvsToUse, nil +} + +/* + Return + (1) requested feature views and features grouped per View + (2) requested on demand feature views + existed in the registry +*/ +func GetFeatureViewsToUseByFeatureRefs( + features []string, + featureViews map[string]*model.FeatureView, + onDemandFeatureViews map[string]*model.OnDemandFeatureView) ([]*FeatureViewAndRefs, []*model.OnDemandFeatureView, error) { + viewNameToViewAndRefs := make(map[string]*FeatureViewAndRefs) + odFvToFeatures := make(map[string][]string) + + for _, featureRef := range features { + featureViewName, featureName, err := ParseFeatureReference(featureRef) + if err != nil { + return nil, nil, err + } + if fv, ok := featureViews[featureViewName]; ok { + if viewAndRef, ok := viewNameToViewAndRefs[fv.Base.Name]; ok { + viewAndRef.FeatureRefs = addStringIfNotContains(viewAndRef.FeatureRefs, featureName) + } else { + viewNameToViewAndRefs[fv.Base.Name] = &FeatureViewAndRefs{ + View: fv, + FeatureRefs: []string{featureName}, + } + } + } else if odfv, ok := onDemandFeatureViews[featureViewName]; ok { + if _, ok := odFvToFeatures[odfv.Base.Name]; !ok { + odFvToFeatures[odfv.Base.Name] = []string{featureName} + } else { + odFvToFeatures[odfv.Base.Name] = append( + odFvToFeatures[odfv.Base.Name], featureName) + } + } else { + return nil, nil, fmt.Errorf("feature View %s doesn't exist, please make sure that you have created the"+ + " feature View %s and that you have registered it by running \"apply\"", featureViewName, featureViewName) + } + } + + odFvsToUse := make([]*model.OnDemandFeatureView, 0) + + for odFvName, featureNames := range odFvToFeatures { + projectedOdFv, err := onDemandFeatureViews[odFvName].ProjectWithFeatures(featureNames) + if err != nil { + return nil, nil, err + } + + err = extractOdFvDependencies( + projectedOdFv, + featureViews, + viewNameToViewAndRefs) + if err != nil { + return nil, nil, err + } + odFvsToUse = append(odFvsToUse, projectedOdFv) + } + + fvsToUse := make([]*FeatureViewAndRefs, 0) + for _, viewAndRefs := range viewNameToViewAndRefs { + fvsToUse = append(fvsToUse, viewAndRefs) + } + + return fvsToUse, odFvsToUse, nil +} + +func extractOdFvDependencies( + odFv *model.OnDemandFeatureView, + sourceFvs map[string]*model.FeatureView, + requestedFeatures map[string]*FeatureViewAndRefs, +) error { + + for _, sourceFvProjection := range odFv.SourceFeatureViewProjections { + fv := sourceFvs[sourceFvProjection.Name] + base, err := fv.Base.WithProjection(sourceFvProjection) + if err != nil { + return err + } + newFv := fv.NewFeatureViewFromBase(base) + + if _, ok := requestedFeatures[sourceFvProjection.NameToUse()]; !ok { + requestedFeatures[sourceFvProjection.NameToUse()] = &FeatureViewAndRefs{ + View: newFv, + FeatureRefs: []string{}, + } + } + + for _, feature := 
range sourceFvProjection.Features { + requestedFeatures[sourceFvProjection.NameToUse()].FeatureRefs = addStringIfNotContains( + requestedFeatures[sourceFvProjection.NameToUse()].FeatureRefs, feature.Name) + } + } + + return nil +} + +func addStringIfNotContains(slice []string, element string) []string { + found := false + for _, item := range slice { + if element == item { + found = true + } + } + if !found { + slice = append(slice, element) + } + return slice +} + +func GetEntityMaps(requestedFeatureViews []*FeatureViewAndRefs, entities []*model.Entity) (map[string]string, map[string]interface{}, error) { + entityNameToJoinKeyMap := make(map[string]string) + expectedJoinKeysSet := make(map[string]interface{}) + + entitiesByName := make(map[string]*model.Entity) + + for _, entity := range entities { + entitiesByName[entity.Name] = entity + } + + for _, featuresAndView := range requestedFeatureViews { + featureView := featuresAndView.View + var joinKeyToAliasMap map[string]string + if featureView.Base.Projection != nil && featureView.Base.Projection.JoinKeyMap != nil { + joinKeyToAliasMap = featureView.Base.Projection.JoinKeyMap + } else { + joinKeyToAliasMap = map[string]string{} + } + + for _, entityName := range featureView.Entities { + joinKey := entitiesByName[entityName].JoinKey + entityNameToJoinKeyMap[entityName] = joinKey + + if alias, ok := joinKeyToAliasMap[joinKey]; ok { + expectedJoinKeysSet[alias] = nil + } else { + expectedJoinKeysSet[joinKey] = nil + } + } + } + return entityNameToJoinKeyMap, expectedJoinKeysSet, nil +} + +func ValidateEntityValues(joinKeyValues map[string]*prototypes.RepeatedValue, + requestData map[string]*prototypes.RepeatedValue, + expectedJoinKeysSet map[string]interface{}) (int, error) { + numRows := -1 + + for joinKey, values := range joinKeyValues { + if _, ok := expectedJoinKeysSet[joinKey]; !ok { + requestData[joinKey] = values + delete(joinKeyValues, joinKey) + // ToDo: when request data will be passed correctly (not as part of entity rows) + // ToDo: throw this error instead + // return 0, fmt.Errorf("JoinKey is not expected in this request: %s\n%v", JoinKey, expectedJoinKeysSet) + } else { + if numRows < 0 { + numRows = len(values.Val) + } else if len(values.Val) != numRows { + return -1, errors.New("valueError: All entity rows must have the same columns") + } + + } + } + + return numRows, nil +} + +func ValidateFeatureRefs(requestedFeatures []*FeatureViewAndRefs, fullFeatureNames bool) error { + featureRefCounter := make(map[string]int) + featureRefs := make([]string, 0) + for _, viewAndFeatures := range requestedFeatures { + for _, feature := range viewAndFeatures.FeatureRefs { + projectedViewName := viewAndFeatures.View.Base.Name + if viewAndFeatures.View.Base.Projection != nil { + projectedViewName = viewAndFeatures.View.Base.Projection.NameToUse() + } + + featureRefs = append(featureRefs, + fmt.Sprintf("%s:%s", projectedViewName, feature)) + } + } + + for _, featureRef := range featureRefs { + if fullFeatureNames { + featureRefCounter[featureRef]++ + } else { + _, featureName, _ := ParseFeatureReference(featureRef) + featureRefCounter[featureName]++ + } + + } + for featureName, occurrences := range featureRefCounter { + if occurrences == 1 { + delete(featureRefCounter, featureName) + } + } + if len(featureRefCounter) >= 1 { + collidedFeatureRefs := make([]string, 0) + for collidedFeatureRef := range featureRefCounter { + if fullFeatureNames { + collidedFeatureRefs = append(collidedFeatureRefs, collidedFeatureRef) + } else { + for _, 
featureRef := range featureRefs { + _, featureName, _ := ParseFeatureReference(featureRef) + if featureName == collidedFeatureRef { + collidedFeatureRefs = append(collidedFeatureRefs, featureRef) + } + } + } + } + return featureNameCollisionError{collidedFeatureRefs, fullFeatureNames} + } + + return nil +} + +func TransposeFeatureRowsIntoColumns(featureData2D [][]onlinestore.FeatureData, + groupRef *GroupedFeaturesPerEntitySet, + requestedFeatureViews []*FeatureViewAndRefs, + arrowAllocator memory.Allocator, + numRows int) ([]*FeatureVector, error) { + + numFeatures := len(groupRef.AliasedFeatureNames) + fvs := make(map[string]*model.FeatureView) + for _, viewAndRefs := range requestedFeatureViews { + fvs[viewAndRefs.View.Base.Name] = viewAndRefs.View + } + + var value *prototypes.Value + var status serving.FieldStatus + var eventTimeStamp *timestamppb.Timestamp + var featureData *onlinestore.FeatureData + var fv *model.FeatureView + var featureViewName string + + vectors := make([]*FeatureVector, 0) + + for featureIndex := 0; featureIndex < numFeatures; featureIndex++ { + currentVector := &FeatureVector{ + Name: groupRef.AliasedFeatureNames[featureIndex], + Statuses: make([]serving.FieldStatus, numRows), + Timestamps: make([]*timestamppb.Timestamp, numRows), + } + vectors = append(vectors, currentVector) + protoValues := make([]*prototypes.Value, numRows) + + for rowEntityIndex, outputIndexes := range groupRef.Indices { + if featureData2D[rowEntityIndex] == nil { + value = nil + status = serving.FieldStatus_NOT_FOUND + eventTimeStamp = ×tamppb.Timestamp{} + } else { + featureData = &featureData2D[rowEntityIndex][featureIndex] + eventTimeStamp = ×tamppb.Timestamp{Seconds: featureData.Timestamp.Seconds, Nanos: featureData.Timestamp.Nanos} + featureViewName = featureData.Reference.FeatureViewName + fv = fvs[featureViewName] + if _, ok := featureData.Value.Val.(*prototypes.Value_NullVal); ok { + value = nil + status = serving.FieldStatus_NOT_FOUND + } else if checkOutsideTtl(eventTimeStamp, timestamppb.Now(), fv.Ttl) { + value = &prototypes.Value{Val: featureData.Value.Val} + status = serving.FieldStatus_OUTSIDE_MAX_AGE + } else { + value = &prototypes.Value{Val: featureData.Value.Val} + status = serving.FieldStatus_PRESENT + } + } + for _, rowIndex := range outputIndexes { + protoValues[rowIndex] = value + currentVector.Statuses[rowIndex] = status + currentVector.Timestamps[rowIndex] = eventTimeStamp + } + } + arrowValues, err := types.ProtoValuesToArrowArray(protoValues, arrowAllocator, numRows) + if err != nil { + return nil, err + } + currentVector.Values = arrowValues + } + + return vectors, nil + +} + +func KeepOnlyRequestedFeatures( + vectors []*FeatureVector, + requestedFeatureRefs []string, + featureService *model.FeatureService, + fullFeatureNames bool) ([]*FeatureVector, error) { + vectorsByName := make(map[string]*FeatureVector) + expectedVectors := make([]*FeatureVector, 0) + + for _, vector := range vectors { + vectorsByName[vector.Name] = vector + } + + if featureService != nil { + for _, projection := range featureService.Projections { + for _, f := range projection.Features { + requestedFeatureRefs = append(requestedFeatureRefs, + fmt.Sprintf("%s:%s", projection.NameToUse(), f.Name)) + } + } + } + + for _, featureRef := range requestedFeatureRefs { + viewName, featureName, err := ParseFeatureReference(featureRef) + if err != nil { + return nil, err + } + qualifiedName := getQualifiedFeatureName(viewName, featureName, fullFeatureNames) + if _, ok := 
vectorsByName[qualifiedName]; !ok { + return nil, fmt.Errorf("requested feature %s can't be retrieved", featureRef) + } + expectedVectors = append(expectedVectors, vectorsByName[qualifiedName]) + } + + return expectedVectors, nil +} + +func EntitiesToFeatureVectors(entityColumns map[string]*prototypes.RepeatedValue, arrowAllocator memory.Allocator, numRows int) ([]*FeatureVector, error) { + vectors := make([]*FeatureVector, 0) + presentVector := make([]serving.FieldStatus, numRows) + timestampVector := make([]*timestamppb.Timestamp, numRows) + for idx := 0; idx < numRows; idx++ { + presentVector[idx] = serving.FieldStatus_PRESENT + timestampVector[idx] = timestamppb.Now() + } + for entityName, values := range entityColumns { + arrowColumn, err := types.ProtoValuesToArrowArray(values.Val, arrowAllocator, numRows) + if err != nil { + return nil, err + } + vectors = append(vectors, &FeatureVector{ + Name: entityName, + Values: arrowColumn, + Statuses: presentVector, + Timestamps: timestampVector, + }) + } + return vectors, nil +} + +func ParseFeatureReference(featureRef string) (featureViewName, featureName string, e error) { + parsedFeatureName := strings.Split(featureRef, ":") + + if len(parsedFeatureName) == 0 { + e = errors.New("featureReference should be in the format: 'FeatureViewName:FeatureName'") + } else if len(parsedFeatureName) == 1 { + featureName = parsedFeatureName[0] + } else { + featureViewName = parsedFeatureName[0] + featureName = parsedFeatureName[1] + } + return +} + +func entityKeysToProtos(joinKeyValues map[string]*prototypes.RepeatedValue) []*prototypes.EntityKey { + keys := make([]string, len(joinKeyValues)) + index := 0 + var numRows int + for k, v := range joinKeyValues { + keys[index] = k + index += 1 + numRows = len(v.Val) + } + sort.Strings(keys) + entityKeys := make([]*prototypes.EntityKey, numRows) + numJoinKeys := len(keys) + // Construct each EntityKey object + for index = 0; index < numRows; index++ { + entityKeys[index] = &prototypes.EntityKey{JoinKeys: keys, EntityValues: make([]*prototypes.Value, numJoinKeys)} + } + + for colIndex, key := range keys { + for index, value := range joinKeyValues[key].GetVal() { + entityKeys[index].EntityValues[colIndex] = value + } + } + return entityKeys +} + +func GroupFeatureRefs(requestedFeatureViews []*FeatureViewAndRefs, + joinKeyValues map[string]*prototypes.RepeatedValue, + entityNameToJoinKeyMap map[string]string, + fullFeatureNames bool, +) (map[string]*GroupedFeaturesPerEntitySet, + error, +) { + groups := make(map[string]*GroupedFeaturesPerEntitySet) + + for _, featuresAndView := range requestedFeatureViews { + joinKeys := make([]string, 0) + fv := featuresAndView.View + featureNames := featuresAndView.FeatureRefs + for _, entityName := range fv.Entities { + joinKeys = append(joinKeys, entityNameToJoinKeyMap[entityName]) + } + + groupKeyBuilder := make([]string, 0) + joinKeysValuesProjection := make(map[string]*prototypes.RepeatedValue) + + joinKeyToAliasMap := make(map[string]string) + if fv.Base.Projection != nil && fv.Base.Projection.JoinKeyMap != nil { + joinKeyToAliasMap = fv.Base.Projection.JoinKeyMap + } + + for _, joinKey := range joinKeys { + var joinKeyOrAlias string + + if alias, ok := joinKeyToAliasMap[joinKey]; ok { + groupKeyBuilder = append(groupKeyBuilder, fmt.Sprintf("%s[%s]", joinKey, alias)) + joinKeyOrAlias = alias + } else { + groupKeyBuilder = append(groupKeyBuilder, joinKey) + joinKeyOrAlias = joinKey + } + + if _, ok := joinKeyValues[joinKeyOrAlias]; !ok { + return nil, fmt.Errorf("key %s 
is missing in provided entity rows", joinKey) + } + joinKeysValuesProjection[joinKey] = joinKeyValues[joinKeyOrAlias] + } + + sort.Strings(groupKeyBuilder) + groupKey := strings.Join(groupKeyBuilder, ",") + + aliasedFeatureNames := make([]string, 0) + featureViewNames := make([]string, 0) + var viewNameToUse string + if fv.Base.Projection != nil { + viewNameToUse = fv.Base.Projection.NameToUse() + } else { + viewNameToUse = fv.Base.Name + } + + for _, featureName := range featureNames { + aliasedFeatureNames = append(aliasedFeatureNames, + getQualifiedFeatureName(viewNameToUse, featureName, fullFeatureNames)) + featureViewNames = append(featureViewNames, fv.Base.Name) + } + + if _, ok := groups[groupKey]; !ok { + joinKeysProto := entityKeysToProtos(joinKeysValuesProjection) + uniqueEntityRows, mappingIndices, err := getUniqueEntityRows(joinKeysProto) + if err != nil { + return nil, err + } + + groups[groupKey] = &GroupedFeaturesPerEntitySet{ + FeatureNames: featureNames, + FeatureViewNames: featureViewNames, + AliasedFeatureNames: aliasedFeatureNames, + Indices: mappingIndices, + EntityKeys: uniqueEntityRows, + } + + } else { + groups[groupKey].FeatureNames = append(groups[groupKey].FeatureNames, featureNames...) + groups[groupKey].AliasedFeatureNames = append(groups[groupKey].AliasedFeatureNames, aliasedFeatureNames...) + groups[groupKey].FeatureViewNames = append(groups[groupKey].FeatureViewNames, featureViewNames...) + } + } + return groups, nil +} + +func getUniqueEntityRows(joinKeysProto []*prototypes.EntityKey) ([]*prototypes.EntityKey, [][]int, error) { + uniqueValues := make(map[[sha256.Size]byte]*prototypes.EntityKey, 0) + positions := make(map[[sha256.Size]byte][]int, 0) + + for index, entityKey := range joinKeysProto { + serializedRow, err := proto.Marshal(entityKey) + if err != nil { + return nil, nil, err + } + + rowHash := sha256.Sum256(serializedRow) + if _, ok := uniqueValues[rowHash]; !ok { + uniqueValues[rowHash] = entityKey + positions[rowHash] = []int{index} + } else { + positions[rowHash] = append(positions[rowHash], index) + } + } + + mappingIndices := make([][]int, len(uniqueValues)) + uniqueEntityRows := make([]*prototypes.EntityKey, 0) + for rowHash, row := range uniqueValues { + nextIdx := len(uniqueEntityRows) + + mappingIndices[nextIdx] = positions[rowHash] + uniqueEntityRows = append(uniqueEntityRows, row) + } + return uniqueEntityRows, mappingIndices, nil +} + +func checkOutsideTtl(featureTimestamp *timestamppb.Timestamp, currentTimestamp *timestamppb.Timestamp, ttl *durationpb.Duration) bool { + return currentTimestamp.GetSeconds()-featureTimestamp.GetSeconds() > ttl.Seconds +} + +func getQualifiedFeatureName(viewName string, featureName string, fullFeatureNames bool) string { + if fullFeatureNames { + return fmt.Sprintf("%s__%s", viewName, featureName) + } else { + return featureName + } +} + +type featureNameCollisionError struct { + featureRefCollisions []string + fullFeatureNames bool +} + +func (e featureNameCollisionError) Error() string { + return fmt.Sprintf("featureNameCollisionError: %s; %t", strings.Join(e.featureRefCollisions, ", "), e.fullFeatureNames) +} diff --git a/go/internal/feast/onlineserving/serving_test.go b/go/internal/feast/onlineserving/serving_test.go new file mode 100644 index 0000000000..2f4cf8eaba --- /dev/null +++ b/go/internal/feast/onlineserving/serving_test.go @@ -0,0 +1,333 @@ +package onlineserving + +import ( + "testing" + + "github.com/feast-dev/feast/go/internal/feast/model" + 
"github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/known/durationpb" + "google.golang.org/protobuf/types/known/timestamppb" +) + +func TestGroupingFeatureRefs(t *testing.T) { + viewA := &model.FeatureView{ + Base: &model.BaseFeatureView{ + Name: "viewA", + Projection: &model.FeatureViewProjection{ + NameAlias: "aliasViewA", + }, + }, + Entities: []string{"driver", "customer"}, + } + viewB := &model.FeatureView{ + Base: &model.BaseFeatureView{Name: "viewB"}, + Entities: []string{"driver", "customer"}, + } + viewC := &model.FeatureView{ + Base: &model.BaseFeatureView{Name: "viewC"}, + Entities: []string{"driver"}, + } + viewD := &model.FeatureView{ + Base: &model.BaseFeatureView{Name: "viewD"}, + Entities: []string{"customer"}, + } + refGroups, _ := GroupFeatureRefs( + []*FeatureViewAndRefs{ + {View: viewA, FeatureRefs: []string{"featureA", "featureB"}}, + {View: viewB, FeatureRefs: []string{"featureC", "featureD"}}, + {View: viewC, FeatureRefs: []string{"featureE"}}, + {View: viewD, FeatureRefs: []string{"featureF"}}, + }, + map[string]*types.RepeatedValue{ + "driver_id": {Val: []*types.Value{ + {Val: &types.Value_Int32Val{Int32Val: 0}}, + {Val: &types.Value_Int32Val{Int32Val: 0}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + }}, + "customer_id": {Val: []*types.Value{ + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 2}}, + {Val: &types.Value_Int32Val{Int32Val: 3}}, + {Val: &types.Value_Int32Val{Int32Val: 3}}, + {Val: &types.Value_Int32Val{Int32Val: 4}}, + }}, + }, + map[string]string{ + "driver": "driver_id", + "customer": "customer_id", + }, + true, + ) + + assert.Len(t, refGroups, 3) + + // Group 1 + assert.Equal(t, []string{"featureA", "featureB", "featureC", "featureD"}, + refGroups["customer_id,driver_id"].FeatureNames) + assert.Equal(t, []string{"viewA", "viewA", "viewB", "viewB"}, + refGroups["customer_id,driver_id"].FeatureViewNames) + assert.Equal(t, []string{ + "aliasViewA__featureA", "aliasViewA__featureB", + "viewB__featureC", "viewB__featureD"}, + refGroups["customer_id,driver_id"].AliasedFeatureNames) + for _, group := range [][]int{{0}, {1}, {2, 3}, {4}} { + assert.Contains(t, refGroups["customer_id,driver_id"].Indices, group) + } + + // Group2 + assert.Equal(t, []string{"featureE"}, + refGroups["driver_id"].FeatureNames) + for _, group := range [][]int{{0, 1}, {2, 3, 4}} { + assert.Contains(t, refGroups["driver_id"].Indices, group) + } + + // Group3 + assert.Equal(t, []string{"featureF"}, + refGroups["customer_id"].FeatureNames) + + for _, group := range [][]int{{0}, {1}, {2, 3}, {4}} { + assert.Contains(t, refGroups["customer_id"].Indices, group) + } + +} + +func TestGroupingFeatureRefsWithJoinKeyAliases(t *testing.T) { + viewA := &model.FeatureView{ + Base: &model.BaseFeatureView{ + Name: "viewA", + Projection: &model.FeatureViewProjection{ + Name: "viewA", + JoinKeyMap: map[string]string{"location_id": "destination_id"}, + }, + }, + Entities: []string{"location"}, + } + viewB := &model.FeatureView{ + Base: &model.BaseFeatureView{Name: "viewB"}, + Entities: []string{"location"}, + } + + refGroups, _ := GroupFeatureRefs( + []*FeatureViewAndRefs{ + {View: viewA, FeatureRefs: []string{"featureA", "featureB"}}, + {View: viewB, FeatureRefs: []string{"featureC", "featureD"}}, + }, + map[string]*types.RepeatedValue{ + 
"location_id": {Val: []*types.Value{ + {Val: &types.Value_Int32Val{Int32Val: 0}}, + {Val: &types.Value_Int32Val{Int32Val: 0}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 1}}, + }}, + "destination_id": {Val: []*types.Value{ + {Val: &types.Value_Int32Val{Int32Val: 1}}, + {Val: &types.Value_Int32Val{Int32Val: 2}}, + {Val: &types.Value_Int32Val{Int32Val: 3}}, + {Val: &types.Value_Int32Val{Int32Val: 3}}, + {Val: &types.Value_Int32Val{Int32Val: 4}}, + }}, + }, + map[string]string{ + "location": "location_id", + }, + true, + ) + + assert.Len(t, refGroups, 2) + + assert.Equal(t, []string{"featureA", "featureB"}, + refGroups["location_id[destination_id]"].FeatureNames) + for _, group := range [][]int{{0}, {1}, {2, 3}, {4}} { + assert.Contains(t, refGroups["location_id[destination_id]"].Indices, group) + } + + assert.Equal(t, []string{"featureC", "featureD"}, + refGroups["location_id"].FeatureNames) + for _, group := range [][]int{{0, 1}, {2, 3, 4}} { + assert.Contains(t, refGroups["location_id"].Indices, group) + } + +} + +func TestGroupingFeatureRefsWithMissingKey(t *testing.T) { + viewA := &model.FeatureView{ + Base: &model.BaseFeatureView{ + Name: "viewA", + Projection: &model.FeatureViewProjection{ + Name: "viewA", + JoinKeyMap: map[string]string{"location_id": "destination_id"}, + }, + }, + Entities: []string{"location"}, + } + + _, err := GroupFeatureRefs( + []*FeatureViewAndRefs{ + {View: viewA, FeatureRefs: []string{"featureA", "featureB"}}, + }, + map[string]*types.RepeatedValue{ + "location_id": {Val: []*types.Value{ + {Val: &types.Value_Int32Val{Int32Val: 0}}, + }}, + }, + map[string]string{ + "location": "location_id", + }, + true, + ) + assert.Errorf(t, err, "key destination_id is missing in provided entity rows") +} + +func createFeature(name string, valueType types.ValueType_Enum) *core.FeatureSpecV2 { + return &core.FeatureSpecV2{ + Name: name, + ValueType: valueType, + } +} + +func createFeatureView(name string, entities []string, features ...*core.FeatureSpecV2) *model.FeatureView { + viewProto := core.FeatureView{ + Spec: &core.FeatureViewSpec{ + Name: name, + Entities: entities, + Features: features, + Ttl: &durationpb.Duration{}, + }, + } + return model.NewFeatureViewFromProto(&viewProto) +} + +func createFeatureService(viewProjections map[string][]*core.FeatureSpecV2) *model.FeatureService { + projections := make([]*core.FeatureViewProjection, 0) + for name, features := range viewProjections { + projections = append(projections, &core.FeatureViewProjection{ + FeatureViewName: name, + FeatureColumns: features, + JoinKeyMap: map[string]string{}, + }) + } + + fsProto := core.FeatureService{ + Spec: &core.FeatureServiceSpec{ + Features: projections, + }, + Meta: &core.FeatureServiceMeta{ + LastUpdatedTimestamp: timestamppb.Now(), + CreatedTimestamp: timestamppb.Now(), + }, + } + + return model.NewFeatureServiceFromProto(&fsProto) +} + +func createOnDemandFeatureView(name string, featureSources map[string][]*core.FeatureSpecV2, features ...*core.FeatureSpecV2) *model.OnDemandFeatureView { + sources := make(map[string]*core.OnDemandSource) + for viewName, features := range featureSources { + sources[viewName] = &core.OnDemandSource{ + Source: &core.OnDemandSource_FeatureViewProjection{ + FeatureViewProjection: &core.FeatureViewProjection{ + FeatureViewName: viewName, + FeatureColumns: features, + JoinKeyMap: map[string]string{}, + }, + }, + } + } + + proto := &core.OnDemandFeatureView{ + Spec: 
&core.OnDemandFeatureViewSpec{ + Name: name, + Sources: sources, + Features: features, + }, + } + return model.NewOnDemandFeatureViewFromProto(proto) +} + +func TestUnpackFeatureService(t *testing.T) { + featASpec := createFeature("featA", types.ValueType_INT32) + featBSpec := createFeature("featB", types.ValueType_INT32) + featCSpec := createFeature("featC", types.ValueType_INT32) + featDSpec := createFeature("featD", types.ValueType_INT32) + featESpec := createFeature("featE", types.ValueType_FLOAT) + onDemandFeature1 := createFeature("featF", types.ValueType_FLOAT) + onDemandFeature2 := createFeature("featG", types.ValueType_FLOAT) + + viewA := createFeatureView("viewA", []string{"entity"}, featASpec, featBSpec) + viewB := createFeatureView("viewB", []string{"entity"}, featCSpec, featDSpec) + viewC := createFeatureView("viewC", []string{"entity"}, featESpec) + onDemandView := createOnDemandFeatureView( + "odfv", + map[string][]*core.FeatureSpecV2{"viewB": {featCSpec}, "viewC": {featESpec}}, + onDemandFeature1, onDemandFeature2) + + fs := createFeatureService(map[string][]*core.FeatureSpecV2{ + "viewA": {featASpec, featBSpec}, + "viewB": {featCSpec}, + "odfv": {onDemandFeature2}, + }) + + fvs, odfvs, err := GetFeatureViewsToUseByService( + fs, + map[string]*model.FeatureView{"viewA": viewA, "viewB": viewB, "viewC": viewC}, + map[string]*model.OnDemandFeatureView{"odfv": onDemandView}) + + assertCorrectUnpacking(t, fvs, odfvs, err) +} + +func assertCorrectUnpacking(t *testing.T, fvs []*FeatureViewAndRefs, odfvs []*model.OnDemandFeatureView, err error) { + assert.Nil(t, err) + assert.Len(t, fvs, 3) + assert.Len(t, odfvs, 1) + + fvsByName := make(map[string]*FeatureViewAndRefs) + for _, fv := range fvs { + fvsByName[fv.View.Base.Name] = fv + } + + // feature views and features as declared in service + assert.Equal(t, []string{"featA", "featB"}, fvsByName["viewA"].FeatureRefs) + assert.Equal(t, []string{"featC"}, fvsByName["viewB"].FeatureRefs) + + // dependency of the on demand feature view + assert.Equal(t, []string{"featE"}, fvsByName["viewC"].FeatureRefs) + + // only requested features projected + assert.Len(t, odfvs[0].Base.Projection.Features, 1) + assert.Equal(t, "featG", odfvs[0].Base.Projection.Features[0].Name) +} + +func TestUnpackFeatureViewsByReferences(t *testing.T) { + featASpec := createFeature("featA", types.ValueType_INT32) + featBSpec := createFeature("featB", types.ValueType_INT32) + featCSpec := createFeature("featC", types.ValueType_INT32) + featDSpec := createFeature("featD", types.ValueType_INT32) + featESpec := createFeature("featE", types.ValueType_FLOAT) + onDemandFeature1 := createFeature("featF", types.ValueType_FLOAT) + onDemandFeature2 := createFeature("featG", types.ValueType_FLOAT) + + viewA := createFeatureView("viewA", []string{"entity"}, featASpec, featBSpec) + viewB := createFeatureView("viewB", []string{"entity"}, featCSpec, featDSpec) + viewC := createFeatureView("viewC", []string{"entity"}, featESpec) + onDemandView := createOnDemandFeatureView( + "odfv", + map[string][]*core.FeatureSpecV2{"viewB": {featCSpec}, "viewC": {featESpec}}, + onDemandFeature1, onDemandFeature2) + + fvs, odfvs, err := GetFeatureViewsToUseByFeatureRefs( + []string{ + "viewA:featA", + "viewA:featB", + "viewB:featC", + "odfv:featG", + }, + map[string]*model.FeatureView{"viewA": viewA, "viewB": viewB, "viewC": viewC}, + map[string]*model.OnDemandFeatureView{"odfv": onDemandView}) + + assertCorrectUnpacking(t, fvs, odfvs, err) +} diff --git 
a/go/internal/feast/onlinestore/onlinestore.go b/go/internal/feast/onlinestore/onlinestore.go
new file mode 100644
index 0000000000..b4a2571480
--- /dev/null
+++ b/go/internal/feast/onlinestore/onlinestore.go
@@ -0,0 +1,65 @@
+package onlinestore
+
+import (
+	"context"
+	"fmt"
+	"github.com/feast-dev/feast/go/internal/feast/registry"
+
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	"github.com/golang/protobuf/ptypes/timestamp"
+)
+
+type FeatureData struct {
+	Reference serving.FeatureReferenceV2
+	Timestamp timestamp.Timestamp
+	Value     types.Value
+}
+
+type OnlineStore interface {
+	// OnlineRead reads multiple features (specified in featureReferences) for multiple
+	// entity keys (specified in entityKeys) and returns an array of arrays of features,
+	// where each feature contains 3 fields:
+	//   1. feature reference
+	//   2. feature event timestamp
+	//   3. feature value
+	// The inner array has the same size as featureReferences,
+	// while the outer array has the same size as entityKeys.
+
+	// TODO: Can we return [][]FeatureData, []timestamps, error instead, removing the
+	// timestamp from the FeatureData struct to mimic the Python code and reduce repeated
+	// storage of the same timestamp (which is stored as a value and not as a pointer)?
+	// Should each attribute in FeatureData be stored as a pointer instead, since the current
+	// design forces values to be copied in OnlineRead + GetOnlineFeatures
+	// (the array is destructed, so we cannot use the same fields in each
+	// Feature object as pointers in GetOnlineFeaturesResponse)
+	// => allocate memory for each field once in OnlineRead
+	// and reuse them in GetOnlineFeaturesResponse?
+	OnlineRead(ctx context.Context, entityKeys []*types.EntityKey, featureViewNames []string, featureNames []string) ([][]FeatureData, error)
+	// Destruct must be called once the user is done using the OnlineStore.
+	// This is required by the connector, since we have to close the plugin.
+	Destruct()
+}
+
+func getOnlineStoreType(onlineStoreConfig map[string]interface{}) (string, bool) {
+	if onlineStoreType, ok := onlineStoreConfig["type"]; !ok {
+		return "", false
+	} else {
+		result, ok := onlineStoreType.(string)
+		return result, ok
+	}
+}
+
+func NewOnlineStore(config *registry.RepoConfig) (OnlineStore, error) {
+	onlineStoreType, ok := getOnlineStoreType(config.OnlineStore)
+	if !ok {
+		onlineStore, err := NewSqliteOnlineStore(config.Project, config, config.OnlineStore)
+		return onlineStore, err
+	}
+	if onlineStoreType == "redis" {
+		onlineStore, err := NewRedisOnlineStore(config.Project, config.OnlineStore)
+		return onlineStore, err
+	} else {
+		return nil, fmt.Errorf("%s online store type is currently not supported; only redis and sqlite are supported", onlineStoreType)
+	}
+}
diff --git a/go/internal/feast/onlinestore/redisonlinestore.go b/go/internal/feast/onlinestore/redisonlinestore.go
new file mode 100644
index 0000000000..9049eae103
--- /dev/null
+++ b/go/internal/feast/onlinestore/redisonlinestore.go
@@ -0,0 +1,358 @@
+package onlinestore
+
+import (
+	"context"
+	"encoding/binary"
+	"errors"
+	"fmt"
+	"sort"
+	"strconv"
+	"strings"
+
+	"github.com/feast-dev/feast/go/protos/feast/serving"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	"github.com/go-redis/redis/v8"
+	"github.com/golang/protobuf/proto"
+	"github.com/spaolacci/murmur3"
+	timestamppb "google.golang.org/protobuf/types/known/timestamppb"
+)
+
+type redisType int
+
+const (
+	redisNode    redisType = 0
+	redisCluster 
redisType = 1 +) + +type RedisOnlineStore struct { + + // Feast project name + // TODO (woop): Should we remove project as state that is tracked at the store level? + project string + + // Redis database type, either a single node server (RedisType.Redis) or a cluster (RedisType.RedisCluster) + t redisType + + // Redis client connector + client *redis.Client +} + +func NewRedisOnlineStore(project string, onlineStoreConfig map[string]interface{}) (*RedisOnlineStore, error) { + store := RedisOnlineStore{project: project} + + var address []string + var password string + var db int // Default to 0 + + // Parse redis_type and write it into conf.t + t, err := getRedisType(onlineStoreConfig) + if err != nil { + return nil, err + } + + // Parse connection_string and write it into conf.address, conf.password, and conf.ssl + redisConnJson, ok := onlineStoreConfig["connection_string"] + if !ok { + // Default to "localhost:6379" + redisConnJson = "localhost:6379" + } + if redisConnStr, ok := redisConnJson.(string); !ok { + return nil, errors.New(fmt.Sprintf("failed to convert connection_string to string: %+v", redisConnJson)) + } else { + parts := strings.Split(redisConnStr, ",") + for _, part := range parts { + if strings.Contains(part, ":") { + address = append(address, part) + } else if strings.Contains(part, "=") { + kv := strings.SplitN(part, "=", 2) + if kv[0] == "password" { + password = kv[1] + } else if kv[0] == "ssl" { + // TODO (woop): Add support for TLS/SSL + // ssl = kv[1] == "true" + } else if kv[0] == "db" { + db, err = strconv.Atoi(kv[1]) + if err != nil { + return nil, err + } + } else { + return nil, errors.New(fmt.Sprintf("unrecognized option in connection_string: %s. Must be one of 'password', 'ssl'", kv[0])) + } + } else { + return nil, errors.New(fmt.Sprintf("unable to parse a part of connection_string: %s. Must contain either ':' (addresses) or '=' (options", part)) + } + } + } + + if t == redisNode { + store.client = redis.NewClient(&redis.Options{ + Addr: address[0], + Password: password, // No password set + DB: db, + }) + } else { + return nil, errors.New("only single node Redis is supported at this time") + } + + return &store, nil +} + +func getRedisType(onlineStoreConfig map[string]interface{}) (redisType, error) { + var t redisType + + redisTypeJson, ok := onlineStoreConfig["redis_type"] + if !ok { + // Default to "redis" + redisTypeJson = "redis" + } else if redisTypeStr, ok := redisTypeJson.(string); !ok { + return -1, errors.New(fmt.Sprintf("failed to convert redis_type to string: %+v", redisTypeJson)) + } else { + if redisTypeStr == "redis" { + t = redisNode + } else if redisTypeStr == "redis_cluster" { + t = redisCluster + } else { + return -1, errors.New(fmt.Sprintf("failed to convert redis_type to enum: %s. 
Must be one of 'redis', 'redis_cluster'", redisTypeStr)) + } + } + return t, nil +} + +func (r *RedisOnlineStore) OnlineRead(ctx context.Context, entityKeys []*types.EntityKey, featureViewNames []string, featureNames []string) ([][]FeatureData, error) { + featureCount := len(featureNames) + index := featureCount + featureViewIndices := make(map[string]int) + indicesFeatureView := make(map[int]string) + for _, featureViewName := range featureViewNames { + if _, ok := featureViewIndices[featureViewName]; !ok { + featureViewIndices[featureViewName] = index + indicesFeatureView[index] = featureViewName + index += 1 + } + } + var hsetKeys = make([]string, index) + h := murmur3.New32() + intBuffer := h.Sum32() + byteBuffer := make([]byte, 4) + + for i := 0; i < featureCount; i++ { + h.Write([]byte(fmt.Sprintf("%s:%s", featureViewNames[i], featureNames[i]))) + intBuffer = h.Sum32() + binary.LittleEndian.PutUint32(byteBuffer, intBuffer) + hsetKeys[i] = string(byteBuffer) + h.Reset() + } + for i := featureCount; i < index; i++ { + view := indicesFeatureView[i] + tsKey := fmt.Sprintf("_ts:%s", view) + hsetKeys[i] = tsKey + featureNames = append(featureNames, tsKey) + } + + redisKeys := make([]*[]byte, len(entityKeys)) + redisKeyToEntityIndex := make(map[string]int) + for i := 0; i < len(entityKeys); i++ { + + var key, err = buildRedisKey(r.project, entityKeys[i]) + if err != nil { + return nil, err + } + redisKeys[i] = key + redisKeyToEntityIndex[string(*key)] = i + } + + // Retrieve features from Redis + // TODO: Move context object out + + results := make([][]FeatureData, len(entityKeys)) + pipe := r.client.Pipeline() + commands := map[string]*redis.SliceCmd{} + + for _, redisKey := range redisKeys { + keyString := string(*redisKey) + commands[keyString] = pipe.HMGet(ctx, keyString, hsetKeys...) + } + + _, err := pipe.Exec(ctx) + if err != nil { + return nil, err + } + + var entityIndex int + var resContainsNonNil bool + for redisKey, values := range commands { + + entityIndex = redisKeyToEntityIndex[redisKey] + resContainsNonNil = false + + results[entityIndex] = make([]FeatureData, featureCount) + res, err := values.Result() + if err != nil { + return nil, err + } + + var timeStamp timestamppb.Timestamp + + for featureIndex, resString := range res { + if featureIndex == featureCount { + break + } + + if resString == nil { + // TODO (Ly): Can there be nil result within each feature or they will all be returned as string proto of types.Value_NullVal proto? 
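+				// Note on the layout read back here: each feature value lives in an
+				// HSET field named by the murmur3_32 hash (little-endian bytes) of
+				// "featureViewName:featureName", computed in the loop above, while
+				// each view's event timestamp lives in a readable "_ts:<view>" field.
+				// A nil HMGet result for a feature field therefore means no value was
+				// stored for this entity, and we fall back to a Null value stamped
+				// with the view's timestamp.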
+ featureName := featureNames[featureIndex] + featureViewName := featureViewNames[featureIndex] + timeStampIndex := featureViewIndices[featureViewName] + timeStampInterface := res[timeStampIndex] + if timeStampInterface != nil { + if timeStampString, ok := timeStampInterface.(string); !ok { + return nil, errors.New("error parsing value from redis") + } else { + if err := proto.Unmarshal([]byte(timeStampString), &timeStamp); err != nil { + return nil, errors.New("error converting parsed redis value to timestamppb.Timestamp") + } + } + } + + results[entityIndex][featureIndex] = FeatureData{Reference: serving.FeatureReferenceV2{FeatureViewName: featureViewName, FeatureName: featureName}, + Timestamp: timestamppb.Timestamp{Seconds: timeStamp.Seconds, Nanos: timeStamp.Nanos}, + Value: types.Value{Val: &types.Value_NullVal{NullVal: types.Null_NULL}}, + } + + } else if valueString, ok := resString.(string); !ok { + return nil, errors.New("error parsing Value from redis") + } else { + resContainsNonNil = true + var value types.Value + if err := proto.Unmarshal([]byte(valueString), &value); err != nil { + return nil, errors.New("error converting parsed redis Value to types.Value") + } else { + featureName := featureNames[featureIndex] + featureViewName := featureViewNames[featureIndex] + timeStampIndex := featureViewIndices[featureViewName] + timeStampInterface := res[timeStampIndex] + if timeStampInterface != nil { + if timeStampString, ok := timeStampInterface.(string); !ok { + return nil, errors.New("error parsing Value from redis") + } else { + if err := proto.Unmarshal([]byte(timeStampString), &timeStamp); err != nil { + return nil, errors.New("error converting parsed redis Value to timestamppb.Timestamp") + } + } + } + results[entityIndex][featureIndex] = FeatureData{Reference: serving.FeatureReferenceV2{FeatureViewName: featureViewName, FeatureName: featureName}, + Timestamp: timestamppb.Timestamp{Seconds: timeStamp.Seconds, Nanos: timeStamp.Nanos}, + Value: types.Value{Val: value.Val}, + } + } + } + } + + if !resContainsNonNil { + results[entityIndex] = nil + } + + } + + return results, nil +} + +// Dummy destruct function to conform with plugin OnlineStore interface +func (r *RedisOnlineStore) Destruct() { + +} + +func buildRedisKey(project string, entityKey *types.EntityKey) (*[]byte, error) { + serKey, err := serializeEntityKey(entityKey) + if err != nil { + return nil, err + } + fullKey := append(*serKey, []byte(project)...) + return &fullKey, nil +} + +func serializeEntityKey(entityKey *types.EntityKey) (*[]byte, error) { + // Serialize entity key to a bytestring so that it can be used as a lookup key in a hash table. 
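+	// Layout (as constructed below): for N join keys, sorted by name, the buffer
+	// holds N (STRING type tag, key name) pairs followed by N (value type tag,
+	// value byte length, value bytes) triples; all tags and lengths are
+	// little-endian uint32. buildRedisKey then appends the project name.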
+ + // Ensure that we have the right amount of join keys and entity values + if len(entityKey.JoinKeys) != len(entityKey.EntityValues) { + return nil, errors.New(fmt.Sprintf("the amount of join key names and entity values don't match: %s vs %s", entityKey.JoinKeys, entityKey.EntityValues)) + } + + // Make sure that join keys are sorted so that we have consistent key building + m := make(map[string]*types.Value) + + for i := 0; i < len(entityKey.JoinKeys); i++ { + m[entityKey.JoinKeys[i]] = entityKey.EntityValues[i] + } + + keys := make([]string, 0, len(m)) + for k := range entityKey.JoinKeys { + keys = append(keys, entityKey.JoinKeys[k]) + } + sort.Strings(keys) + + // Build the key + length := 5 * len(keys) + bufferList := make([][]byte, length) + + for i := 0; i < len(keys); i++ { + offset := i * 2 + byteBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(byteBuffer, uint32(types.ValueType_Enum_value["STRING"])) + bufferList[offset] = byteBuffer + bufferList[offset+1] = []byte(keys[i]) + } + + for i := 0; i < len(keys); i++ { + offset := (2 * len(keys)) + (i * 3) + value := m[keys[i]].GetVal() + + valueBytes, valueTypeBytes, err := serializeValue(value) + if err != nil { + return valueBytes, err + } + + typeBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(typeBuffer, uint32(valueTypeBytes)) + + lenBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(lenBuffer, uint32(len(*valueBytes))) + + bufferList[offset+0] = typeBuffer + bufferList[offset+1] = lenBuffer + bufferList[offset+2] = *valueBytes + } + + // Convert from an array of byte arrays to a single byte array + var entityKeyBuffer []byte + for i := 0; i < len(bufferList); i++ { + entityKeyBuffer = append(entityKeyBuffer, bufferList[i]...) + } + + return &entityKeyBuffer, nil +} + +func serializeValue(value interface{}) (*[]byte, types.ValueType_Enum, error) { + // TODO: Implement support for other types (at least the major types like ints, strings, bytes) + switch x := (value).(type) { + case *types.Value_StringVal: + valueString := []byte(x.StringVal) + return &valueString, types.ValueType_STRING, nil + case *types.Value_BytesVal: + return &x.BytesVal, types.ValueType_BYTES, nil + case *types.Value_Int32Val: + valueBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(valueBuffer, uint32(x.Int32Val)) + return &valueBuffer, types.ValueType_INT32, nil + case *types.Value_Int64Val: + // TODO (woop): We unfortunately have to use 32 bit here for backward compatibility :( + valueBuffer := make([]byte, 4) + binary.LittleEndian.PutUint32(valueBuffer, uint32(x.Int64Val)) + return &valueBuffer, types.ValueType_INT64, nil + case nil: + return nil, types.ValueType_INVALID, fmt.Errorf("could not detect type for %v", x) + default: + return nil, types.ValueType_INVALID, fmt.Errorf("could not detect type for %v", x) + } +} diff --git a/go/internal/feast/onlinestore/sqliteonlinestore.go b/go/internal/feast/onlinestore/sqliteonlinestore.go new file mode 100644 index 0000000000..f8c5325545 --- /dev/null +++ b/go/internal/feast/onlinestore/sqliteonlinestore.go @@ -0,0 +1,160 @@ +package onlinestore + +import ( + "crypto/sha1" + "database/sql" + "encoding/hex" + "errors" + "github.com/feast-dev/feast/go/internal/feast/registry" + "strings" + "sync" + "time" + + "context" + "fmt" + + "github.com/feast-dev/feast/go/protos/feast/serving" + "github.com/feast-dev/feast/go/protos/feast/types" + _ "github.com/mattn/go-sqlite3" + "google.golang.org/protobuf/proto" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" +) + 
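A side note on `serializeValue` just above: `Int64Val` is intentionally packed into four bytes for backward compatibility, so int64 values outside the int32 range wrap silently. A minimal standalone sketch of the effect (example value assumed; not part of the patch):

```go
package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	v := int64(5_000_000_000) // does not fit in 32 bits
	buf := make([]byte, 4)
	binary.LittleEndian.PutUint32(buf, uint32(v)) // same cast as serializeValue
	fmt.Println(binary.LittleEndian.Uint32(buf))  // prints 705032704, not 5000000000
}
```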
+type SqliteOnlineStore struct {
+	// Feast project name
+	project string
+	path    string
+	db      *sql.DB
+	db_mu   sync.Mutex
+}
+
+// NewSqliteOnlineStore creates a new sqlite online store object. onlineStoreConfig should
+// contain the path of the database file, relative to repoConfig.RepoPath.
+func NewSqliteOnlineStore(project string, repoConfig *registry.RepoConfig, onlineStoreConfig map[string]interface{}) (*SqliteOnlineStore, error) {
+	store := SqliteOnlineStore{project: project}
+	if db_path, ok := onlineStoreConfig["path"]; !ok {
+		return nil, fmt.Errorf("cannot find sqlite path %s", db_path)
+	} else {
+		if dbPathStr, ok := db_path.(string); !ok {
+			return nil, fmt.Errorf("cannot convert sqlite path to string %s", db_path)
+		} else {
+			store.path = fmt.Sprintf("%s/%s", repoConfig.RepoPath, dbPathStr)
+			db, err := initializeConnection(store.path)
+			if err != nil {
+				return nil, err
+			}
+			store.db = db
+		}
+	}
+
+	return &store, nil
+}
+
+func (s *SqliteOnlineStore) Destruct() {
+	s.db.Close()
+}
+
+// OnlineRead returns a 2D FeatureData array. Each row corresponds to one entity key and each
+// column to a single feature, so the number of columns equals the length of featureNames.
+// It reads from every table in featureViewNames with the entity keys provided.
+func (s *SqliteOnlineStore) OnlineRead(ctx context.Context, entityKeys []*types.EntityKey, featureViewNames []string, featureNames []string) ([][]FeatureData, error) {
+	featureCount := len(featureNames)
+	_, err := s.getConnection()
+	if err != nil {
+		return nil, err
+	}
+	project := s.project
+	results := make([][]FeatureData, len(entityKeys))
+	entityNameToEntityIndex := make(map[string]int)
+	in_query := make([]string, len(entityKeys))
+	serialized_entities := make([]interface{}, len(entityKeys))
+	for i := 0; i < len(entityKeys); i++ {
+		serKey, err := serializeEntityKey(entityKeys[i])
+		if err != nil {
+			return nil, err
+		}
+		// TODO: fix this, string conversion is not safe
+		entityNameToEntityIndex[hashSerializedEntityKey(serKey)] = i
+		// for IN clause in read query
+		in_query[i] = "?"
+		serialized_entities[i] = *serKey
+	}
+	featureNamesToIdx := make(map[string]int)
+	for idx, name := range featureNames {
+		featureNamesToIdx[name] = idx
+	}
+
+	for _, featureViewName := range featureViewNames {
+		query_string := fmt.Sprintf(`SELECT entity_key, feature_name, Value, event_ts
+			FROM %s
+			WHERE entity_key IN (%s)
+			ORDER BY entity_key`, tableId(project, featureViewName), strings.Join(in_query, ","))
+		rows, err := s.db.Query(query_string, serialized_entities...)
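+		// e.g. for project "feature_repo" and view "driver_hourly_stats" this reads
+		// from the "feature_repo_driver_hourly_stats" table (see tableId below),
+		// with one "?" placeholder per serialized entity key.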
+ if err != nil { + return nil, err + } + defer rows.Close() + for rows.Next() { + var entity_key []byte + var feature_name string + var valueString []byte + var event_ts time.Time + var value types.Value + err = rows.Scan(&entity_key, &feature_name, &valueString, &event_ts) + if err != nil { + return nil, errors.New("error could not resolve row in query (entity key, feature name, value, event ts)") + } + if err := proto.Unmarshal(valueString, &value); err != nil { + return nil, errors.New("error converting parsed value to types.Value") + } + rowIdx := entityNameToEntityIndex[hashSerializedEntityKey(&entity_key)] + if results[rowIdx] == nil { + results[rowIdx] = make([]FeatureData, featureCount) + } + results[rowIdx][featureNamesToIdx[feature_name]] = FeatureData{Reference: serving.FeatureReferenceV2{FeatureViewName: featureViewName, FeatureName: feature_name}, + Timestamp: *timestamppb.New(event_ts), + Value: types.Value{Val: value.Val}, + } + } + } + return results, nil +} + +// Gets a sqlite connection and sets it to the online store and also returns a pointer to the connection. +func (s *SqliteOnlineStore) getConnection() (*sql.DB, error) { + s.db_mu.Lock() + defer s.db_mu.Unlock() + if s.db == nil { + if s.path == "" { + return nil, errors.New("no database path available") + } + db, err := initializeConnection(s.path) + s.db = db + if err != nil { + return nil, err + } + } + return s.db, nil +} + +// Constructs the table id from the project and table(featureViewName) string. +func tableId(project string, featureViewName string) string { + return fmt.Sprintf("%s_%s", project, featureViewName) +} + +// Creates a connection to the sqlite database and returns the connection. +func initializeConnection(db_path string) (*sql.DB, error) { + db, err := sql.Open("sqlite3", db_path) + if err != nil { + return nil, err + } + return db, nil +} + +func hashSerializedEntityKey(serializedEntityKey *[]byte) string { + if serializedEntityKey == nil { + return "" + } + h := sha1.New() + h.Write(*serializedEntityKey) + sha1_hash := hex.EncodeToString(h.Sum(nil)) + return sha1_hash +} diff --git a/go/internal/feast/onlinestore/sqliteonlinestore_test.go b/go/internal/feast/onlinestore/sqliteonlinestore_test.go new file mode 100644 index 0000000000..cbee9cd91c --- /dev/null +++ b/go/internal/feast/onlinestore/sqliteonlinestore_test.go @@ -0,0 +1,83 @@ +package onlinestore + +import ( + "context" + "path/filepath" + "reflect" + "testing" + + "github.com/feast-dev/feast/go/internal/feast/registry" + + "github.com/feast-dev/feast/go/internal/test" + "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/stretchr/testify/assert" +) + +func TestSqliteAndFeatureRepoSetup(t *testing.T) { + dir := "../../test" + feature_repo_path := filepath.Join(dir, "feature_repo") + err := test.SetupCleanFeatureRepo(dir) + assert.Nil(t, err) + defer test.CleanUpRepo(dir) + config, err := registry.NewRepoConfigFromFile(feature_repo_path) + assert.Nil(t, err) + assert.Equal(t, "feature_repo", config.Project) + assert.Equal(t, "data/registry.db", config.GetRegistryConfig().Path) + assert.Equal(t, "local", config.Provider) + assert.Equal(t, map[string]interface{}{ + "path": "data/online_store.db", + }, config.OnlineStore) + assert.Empty(t, config.OfflineStore) + assert.Empty(t, config.FeatureServer) + assert.Empty(t, config.Flags) +} + +func TestSqliteOnlineRead(t *testing.T) { + dir := "../../test" + feature_repo_path := filepath.Join(dir, "feature_repo") + test.SetupCleanFeatureRepo(dir) + defer test.CleanUpRepo(dir) + 
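+	// The scaffolded repo's feature_store.yaml resolves to the local provider with
+	// a sqlite online store at data/online_store.db (see the assertions in
+	// TestSqliteAndFeatureRepoSetup above).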
config, err := registry.NewRepoConfigFromFile(feature_repo_path) + assert.Nil(t, err) + store, err := NewSqliteOnlineStore("feature_repo", config, config.OnlineStore) + defer store.Destruct() + assert.Nil(t, err) + entity_key1 := types.EntityKey{ + JoinKeys: []string{"driver_id"}, + EntityValues: []*types.Value{{Val: &types.Value_Int64Val{Int64Val: 1005}}}, + } + entity_key2 := types.EntityKey{ + JoinKeys: []string{"driver_id"}, + EntityValues: []*types.Value{{Val: &types.Value_Int64Val{Int64Val: 1001}}}, + } + entity_key3 := types.EntityKey{ + JoinKeys: []string{"driver_id"}, + EntityValues: []*types.Value{{Val: &types.Value_Int64Val{Int64Val: 1003}}}, + } + entityKeys := []*types.EntityKey{&entity_key1, &entity_key2, &entity_key3} + tableNames := []string{"driver_hourly_stats"} + featureNames := []string{"conv_rate", "acc_rate", "avg_daily_trips"} + featureData, err := store.OnlineRead(context.Background(), entityKeys, tableNames, featureNames) + assert.Nil(t, err) + returnedFeatureValues := make([]*types.Value, 0) + returnedFeatureNames := make([]string, 0) + for _, featureVector := range featureData { + for idx := range featureVector { + returnedFeatureValues = append(returnedFeatureValues, &featureVector[idx].Value) + returnedFeatureNames = append(returnedFeatureNames, featureVector[idx].Reference.FeatureName) + } + } + rows, err := test.ReadParquet(filepath.Join(feature_repo_path, "data", "driver_stats.parquet")) + assert.Nil(t, err) + entities := map[int64]bool{1005: true, 1001: true, 1003: true} + correctFeatures := test.GetLatestFeatures(rows, entities) + expectedFeatureValues := make([]*types.Value, 0) + for _, key := range []int64{1005, 1001, 1003} { + expectedFeatureValues = append(expectedFeatureValues, &types.Value{Val: &types.Value_FloatVal{FloatVal: correctFeatures[key].ConvRate}}) + expectedFeatureValues = append(expectedFeatureValues, &types.Value{Val: &types.Value_FloatVal{FloatVal: correctFeatures[key].AccRate}}) + expectedFeatureValues = append(expectedFeatureValues, &types.Value{Val: &types.Value_Int64Val{Int64Val: int64(correctFeatures[key].AvgDailyTrips)}}) + } + expectedFeatureNames := []string{"conv_rate", "acc_rate", "avg_daily_trips", "conv_rate", "acc_rate", "avg_daily_trips", "conv_rate", "acc_rate", "avg_daily_trips"} + assert.True(t, reflect.DeepEqual(expectedFeatureValues, returnedFeatureValues)) + assert.True(t, reflect.DeepEqual(expectedFeatureNames, returnedFeatureNames)) +} diff --git a/go/internal/feast/registry/local.go b/go/internal/feast/registry/local.go new file mode 100644 index 0000000000..22db73a320 --- /dev/null +++ b/go/internal/feast/registry/local.go @@ -0,0 +1,64 @@ +package registry + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/golang/protobuf/proto" + "github.com/google/uuid" + "google.golang.org/protobuf/types/known/timestamppb" + "io/ioutil" + "os" + "path/filepath" +) + +// A LocalRegistryStore is a file-based implementation of the RegistryStore interface. +type LocalRegistryStore struct { + filePath string +} + +// NewLocalRegistryStore creates a LocalRegistryStore with the given configuration and infers +// the file path from the repo path and registry path. 
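+// If config.Path is absolute it is used as-is; otherwise it is joined with repoPath.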
+func NewLocalRegistryStore(config *RegistryConfig, repoPath string) *LocalRegistryStore {
+	lr := LocalRegistryStore{}
+	registryPath := config.Path
+	if filepath.IsAbs(registryPath) {
+		lr.filePath = registryPath
+	} else {
+		lr.filePath = filepath.Join(repoPath, registryPath)
+	}
+	return &lr
+}
+
+// GetRegistryProto reads and parses the registry proto from the file path.
+func (r *LocalRegistryStore) GetRegistryProto() (*core.Registry, error) {
+	registry := &core.Registry{}
+	in, err := ioutil.ReadFile(r.filePath)
+	if err != nil {
+		return nil, err
+	}
+	if err := proto.Unmarshal(in, registry); err != nil {
+		return nil, err
+	}
+	return registry, nil
+}
+
+func (r *LocalRegistryStore) UpdateRegistryProto(rp *core.Registry) error {
+	return r.writeRegistry(rp)
+}
+
+func (r *LocalRegistryStore) Teardown() error {
+	return os.Remove(r.filePath)
+}
+
+// writeRegistry stamps a fresh version id and timestamp on the proto before
+// persisting it to disk.
+func (r *LocalRegistryStore) writeRegistry(rp *core.Registry) error {
+	rp.VersionId = uuid.New().String()
+	rp.LastUpdated = timestamppb.Now()
+	bytes, err := proto.Marshal(rp)
+	if err != nil {
+		return err
+	}
+	return ioutil.WriteFile(r.filePath, bytes, 0644)
+}
diff --git a/go/internal/feast/registry/registry.go b/go/internal/feast/registry/registry.go
new file mode 100644
index 0000000000..38cf167a9f
--- /dev/null
+++ b/go/internal/feast/registry/registry.go
@@ -0,0 +1,298 @@
+package registry
+
+import (
+	"errors"
+	"fmt"
+	"net/url"
+	"sync"
+	"time"
+
+	"github.com/feast-dev/feast/go/internal/feast/model"
+
+	"github.com/feast-dev/feast/go/protos/feast/core"
+)
+
+var REGISTRY_SCHEMA_VERSION string = "1"
+var REGISTRY_STORE_CLASS_FOR_SCHEME map[string]string = map[string]string{
+	"gs":   "GCSRegistryStore",
+	"s3":   "S3RegistryStore",
+	"file": "LocalRegistryStore",
+	"":     "LocalRegistryStore",
+}
+
+/*
+	A Registry caches the protos of FeatureView, FeatureService, Entity and
+	OnDemandFeatureView, but returns copies of the corresponding non-proto
+	model objects to callers.
+*/
+
+type Registry struct {
+	registryStore                  RegistryStore
+	cachedFeatureServices          map[string]map[string]*core.FeatureService
+	cachedEntities                 map[string]map[string]*core.Entity
+	cachedFeatureViews             map[string]map[string]*core.FeatureView
+	cachedOnDemandFeatureViews     map[string]map[string]*core.OnDemandFeatureView
+	cachedRegistry                 *core.Registry
+	cachedRegistryProtoLastUpdated time.Time
+	cachedRegistryProtoTtl         time.Duration
+	mu                             sync.Mutex
+}
+
+func NewRegistry(registryConfig *RegistryConfig, repoPath string) (*Registry, error) {
+	registryStoreType := registryConfig.RegistryStoreType
+	registryPath := registryConfig.Path
+	r := &Registry{
+		// CacheTtlSeconds is expressed in seconds; convert it to a duration.
+		cachedRegistryProtoTtl: time.Duration(registryConfig.CacheTtlSeconds) * time.Second,
+	}
+
+	if len(registryStoreType) == 0 {
+		registryStore, err := getRegistryStoreFromScheme(registryPath, registryConfig, repoPath)
+		if err != nil {
+			return nil, err
+		}
+		r.registryStore = registryStore
+	} else {
+		registryStore, err := getRegistryStoreFromType(registryStoreType, registryConfig, repoPath)
+		if err != nil {
+			return nil, err
+		}
+		r.registryStore = registryStore
+	}
+
+	return r, nil
+}
+
+func (r *Registry) InitializeRegistry() {
+	_, err := r.getRegistryProto()
+	if err != nil {
+		// No registry exists yet; write an empty one with the current schema version.
+		registryProto := &core.Registry{RegistrySchemaVersion: REGISTRY_SCHEMA_VERSION}
+		r.registryStore.UpdateRegistryProto(registryProto)
+	}
+	// Keep the cached registry fresh in the background. A non-positive TTL
+	// would make time.NewTicker panic, so it disables the background refresh.
+	if r.cachedRegistryProtoTtl > 0 {
+		go r.refreshRegistryOnInterval()
+	}
+}
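+
+// For illustration, a hypothetical feature_store.yaml excerpt such as
+//
+//	registry:
+//	  path: data/registry.db
+//	  cache_ttl_seconds: 60
+//
+// would refresh the cached registry proto roughly once a minute.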
+
+// refreshRegistryOnInterval re-reads the registry proto from the registry
+// store every cachedRegistryProtoTtl, stopping on the first failed refresh.
+func (r *Registry) refreshRegistryOnInterval() {
+	ticker := time.NewTicker(r.cachedRegistryProtoTtl)
+	for ; true; <-ticker.C {
+		err := r.refresh()
+		if err != nil {
+			return
+		}
+	}
+}
+
+func (r *Registry) refresh() error {
+	_, err := r.getRegistryProto()
+	return err
+}
+
+func (r *Registry) getRegistryProto() (*core.Registry, error) {
+	expired := r.cachedRegistry == nil || (r.cachedRegistryProtoTtl > 0 && time.Now().After(r.cachedRegistryProtoLastUpdated.Add(r.cachedRegistryProtoTtl)))
+	if !expired {
+		return r.cachedRegistry, nil
+	}
+	registryProto, err := r.registryStore.GetRegistryProto()
+	if err != nil {
+		return registryProto, err
+	}
+	r.load(registryProto)
+	return registryProto, nil
+}
+
+func (r *Registry) load(registry *core.Registry) {
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	r.cachedRegistry = registry
+	r.cachedFeatureServices = make(map[string]map[string]*core.FeatureService)
+	r.cachedEntities = make(map[string]map[string]*core.Entity)
+	r.cachedFeatureViews = make(map[string]map[string]*core.FeatureView)
+	r.cachedOnDemandFeatureViews = make(map[string]map[string]*core.OnDemandFeatureView)
+	r.loadEntities(registry)
+	r.loadFeatureServices(registry)
+	r.loadFeatureViews(registry)
+	r.loadOnDemandFeatureViews(registry)
+	r.cachedRegistryProtoLastUpdated = time.Now()
+}
+
+func (r *Registry) loadEntities(registry *core.Registry) {
+	entities := registry.Entities
+	for _, entity := range entities {
+		if _, ok := r.cachedEntities[entity.Spec.Project]; !ok {
+			r.cachedEntities[entity.Spec.Project] = make(map[string]*core.Entity)
+		}
+		r.cachedEntities[entity.Spec.Project][entity.Spec.Name] = entity
+	}
+}
+
+func (r *Registry) loadFeatureServices(registry *core.Registry) {
+	featureServices := registry.FeatureServices
+	for _, featureService := range featureServices {
+		if _, ok := r.cachedFeatureServices[featureService.Spec.Project]; !ok {
+			r.cachedFeatureServices[featureService.Spec.Project] = make(map[string]*core.FeatureService)
+		}
+		r.cachedFeatureServices[featureService.Spec.Project][featureService.Spec.Name] = featureService
+	}
+}
+
+func (r *Registry) loadFeatureViews(registry *core.Registry) {
+	featureViews := registry.FeatureViews
+	for _, featureView := range featureViews {
+		if _, ok := r.cachedFeatureViews[featureView.Spec.Project]; !ok {
+			r.cachedFeatureViews[featureView.Spec.Project] = make(map[string]*core.FeatureView)
+		}
+		r.cachedFeatureViews[featureView.Spec.Project][featureView.Spec.Name] = featureView
+	}
+}
+
+func (r *Registry) loadOnDemandFeatureViews(registry *core.Registry) {
+	onDemandFeatureViews := registry.OnDemandFeatureViews
+	for _, onDemandFeatureView := range onDemandFeatureViews {
+		if _, ok := r.cachedOnDemandFeatureViews[onDemandFeatureView.Spec.Project]; !ok {
+			r.cachedOnDemandFeatureViews[onDemandFeatureView.Spec.Project] = make(map[string]*core.OnDemandFeatureView)
+		}
+		r.cachedOnDemandFeatureViews[onDemandFeatureView.Spec.Project][onDemandFeatureView.Spec.Name] = onDemandFeatureView
+	}
+}
+
+/*
+	Look up Entities inside project
+	Returns empty list if project not found
+*/
+
+func (r *Registry) ListEntities(project string) ([]*model.Entity, error) {
+	if cachedEntities, ok := r.cachedEntities[project]; !ok {
+		return []*model.Entity{}, nil
+	} else {
+		entities := make([]*model.Entity, len(cachedEntities))
+		index := 0
+		for _, entityProto := range cachedEntities {
+			entities[index] = model.NewEntityFromProto(entityProto)
+			index += 1
+		}
+		return entities, nil
+	}
+}
+
+/*
+	Look up Feature Views inside project
+	Returns empty list if project not found
+*/
+
+func (r *Registry) 
ListFeatureViews(project string) ([]*model.FeatureView, error) { + if cachedFeatureViews, ok := r.cachedFeatureViews[project]; !ok { + return []*model.FeatureView{}, nil + } else { + featureViews := make([]*model.FeatureView, len(cachedFeatureViews)) + index := 0 + for _, featureViewProto := range cachedFeatureViews { + featureViews[index] = model.NewFeatureViewFromProto(featureViewProto) + index += 1 + } + return featureViews, nil + } +} + +/* + Look up Feature Views inside project + Returns empty list if project not found +*/ + +func (r *Registry) ListFeatureServices(project string) ([]*model.FeatureService, error) { + if cachedFeatureServices, ok := r.cachedFeatureServices[project]; !ok { + return []*model.FeatureService{}, nil + } else { + featureServices := make([]*model.FeatureService, len(cachedFeatureServices)) + index := 0 + for _, featureServiceProto := range cachedFeatureServices { + featureServices[index] = model.NewFeatureServiceFromProto(featureServiceProto) + index += 1 + } + return featureServices, nil + } +} + +/* + Look up On Demand Feature Views inside project + Returns empty list if project not found +*/ + +func (r *Registry) ListOnDemandFeatureViews(project string) ([]*model.OnDemandFeatureView, error) { + if cachedOnDemandFeatureViews, ok := r.cachedOnDemandFeatureViews[project]; !ok { + return []*model.OnDemandFeatureView{}, nil + } else { + onDemandFeatureViews := make([]*model.OnDemandFeatureView, len(cachedOnDemandFeatureViews)) + index := 0 + for _, onDemandFeatureViewProto := range cachedOnDemandFeatureViews { + onDemandFeatureViews[index] = model.NewOnDemandFeatureViewFromProto(onDemandFeatureViewProto) + index += 1 + } + return onDemandFeatureViews, nil + } +} + +func (r *Registry) GetEntity(project, entityName string) (*model.Entity, error) { + if cachedEntities, ok := r.cachedEntities[project]; !ok { + return nil, fmt.Errorf("no cached entities found for project %s", project) + } else { + if entity, ok := cachedEntities[entityName]; !ok { + return nil, fmt.Errorf("no cached entity %s found for project %s", entityName, project) + } else { + return model.NewEntityFromProto(entity), nil + } + } +} + +func (r *Registry) GetFeatureView(project, featureViewName string) (*model.FeatureView, error) { + if cachedFeatureViews, ok := r.cachedFeatureViews[project]; !ok { + return nil, fmt.Errorf("no cached feature views found for project %s", project) + } else { + if featureViewProto, ok := cachedFeatureViews[featureViewName]; !ok { + return nil, fmt.Errorf("no cached feature view %s found for project %s", featureViewName, project) + } else { + return model.NewFeatureViewFromProto(featureViewProto), nil + } + } +} + +func (r *Registry) GetFeatureService(project, featureServiceName string) (*model.FeatureService, error) { + if cachedFeatureServices, ok := r.cachedFeatureServices[project]; !ok { + return nil, fmt.Errorf("no cached feature services found for project %s", project) + } else { + if featureServiceProto, ok := cachedFeatureServices[featureServiceName]; !ok { + return nil, fmt.Errorf("no cached feature service %s found for project %s", featureServiceName, project) + } else { + return model.NewFeatureServiceFromProto(featureServiceProto), nil + } + } +} + +func (r *Registry) GetOnDemandFeatureView(project, onDemandFeatureViewName string) (*model.OnDemandFeatureView, error) { + if cachedOnDemandFeatureViews, ok := r.cachedOnDemandFeatureViews[project]; !ok { + return nil, fmt.Errorf("no cached on demand feature views found for project %s", project) + } else { + if 
onDemandFeatureViewProto, ok := cachedOnDemandFeatureViews[onDemandFeatureViewName]; !ok { + return nil, fmt.Errorf("no cached on demand feature view %s found for project %s", onDemandFeatureViewName, project) + } else { + return model.NewOnDemandFeatureViewFromProto(onDemandFeatureViewProto), nil + } + } +} + +func getRegistryStoreFromScheme(registryPath string, registryConfig *RegistryConfig, repoPath string) (RegistryStore, error) { + uri, err := url.Parse(registryPath) + if err != nil { + return nil, err + } + if registryStoreType, ok := REGISTRY_STORE_CLASS_FOR_SCHEME[uri.Scheme]; ok { + return getRegistryStoreFromType(registryStoreType, registryConfig, repoPath) + } + return nil, fmt.Errorf("registry path %s has unsupported scheme %s. Supported schemes are file, s3 and gs", registryPath, uri.Scheme) +} + +func getRegistryStoreFromType(registryStoreType string, registryConfig *RegistryConfig, repoPath string) (RegistryStore, error) { + switch registryStoreType { + case "LocalRegistryStore": + return NewLocalRegistryStore(registryConfig, repoPath), nil + } + return nil, errors.New("only LocalRegistryStore as a RegistryStore is supported at this moment") +} diff --git a/go/internal/feast/registry/registrystore.go b/go/internal/feast/registry/registrystore.go new file mode 100644 index 0000000000..f872d66858 --- /dev/null +++ b/go/internal/feast/registry/registrystore.go @@ -0,0 +1,12 @@ +package registry + +import ( + "github.com/feast-dev/feast/go/protos/feast/core" +) + +// A RegistryStore is a storage backend for the Feast registry. +type RegistryStore interface { + GetRegistryProto() (*core.Registry, error) + UpdateRegistryProto(*core.Registry) error + Teardown() error +} diff --git a/go/internal/feast/registry/repoconfig.go b/go/internal/feast/registry/repoconfig.go new file mode 100644 index 0000000000..e5efd899de --- /dev/null +++ b/go/internal/feast/registry/repoconfig.go @@ -0,0 +1,97 @@ +package registry + +import ( + "encoding/json" + "github.com/ghodss/yaml" + "io/ioutil" + "path/filepath" +) + +const ( + defaultCacheTtlSeconds = 600 +) + +type RepoConfig struct { + // Feast project name + Project string `json:"project"` + // Feast provider name + Provider string `json:"provider"` + // Path to the registry. Custom registry loaders are not yet supported + // Registry string `json:"registry"` + Registry interface{} `json:"registry"` + // Online store config + OnlineStore map[string]interface{} `json:"online_store"` + // Offline store config + OfflineStore map[string]interface{} `json:"offline_store"` + // Feature server config (currently unrelated to Go server) + FeatureServer map[string]interface{} `json:"feature_server"` + // Feature flags for experimental features + Flags map[string]interface{} `json:"flags"` + // RepoPath + RepoPath string `json:"repo_path"` +} + +type RegistryConfig struct { + RegistryStoreType string `json:"registry_store_type"` + Path string `json:"path"` + CacheTtlSeconds int64 `json:"cache_ttl_seconds" default:"600"` +} + +// NewRepoConfigFromJSON converts a JSON string into a RepoConfig struct and also sets the repo path. 
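+// A minimal input sketch (hypothetical values):
+//
+//	{"project": "feature_repo", "provider": "local",
+//	 "registry": "data/registry.db",
+//	 "online_store": {"path": "data/online_store.db"}}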
+func NewRepoConfigFromJSON(repoPath, configJSON string) (*RepoConfig, error) {
+	config := RepoConfig{}
+	if err := json.Unmarshal([]byte(configJSON), &config); err != nil {
+		return nil, err
+	}
+	repoPath, err := filepath.Abs(repoPath)
+	if err != nil {
+		return nil, err
+	}
+	config.RepoPath = repoPath
+	return &config, nil
+}
+
+// NewRepoConfigFromFile reads the `feature_store.yaml` file in the repo path and converts it
+// into a RepoConfig struct.
+func NewRepoConfigFromFile(repoPath string) (*RepoConfig, error) {
+	data, err := ioutil.ReadFile(filepath.Join(repoPath, "feature_store.yaml"))
+	if err != nil {
+		return nil, err
+	}
+	repoPath, err = filepath.Abs(repoPath)
+	if err != nil {
+		return nil, err
+	}
+
+	config := RepoConfig{}
+	if err = yaml.Unmarshal(data, &config); err != nil {
+		return nil, err
+	}
+	config.RepoPath = repoPath
+	return &config, nil
+}
+
+// GetRegistryConfig normalizes the `registry` block of the config: a plain
+// string is interpreted as the registry path, while a map may also carry the
+// registry store type and the cache TTL.
+func (r *RepoConfig) GetRegistryConfig() *RegistryConfig {
+	if registryConfigMap, ok := r.Registry.(map[string]interface{}); ok {
+		registryConfig := RegistryConfig{CacheTtlSeconds: defaultCacheTtlSeconds}
+		for k, v := range registryConfigMap {
+			switch k {
+			case "path":
+				if value, ok := v.(string); ok {
+					registryConfig.Path = value
+				}
+			case "registry_store_type":
+				if value, ok := v.(string); ok {
+					registryConfig.RegistryStoreType = value
+				}
+			case "cache_ttl_seconds":
+				// Numbers decoded into an interface{} arrive as float64, both
+				// from encoding/json and from ghodss/yaml (which round-trips
+				// through JSON), so accept float64 in addition to int64.
+				switch value := v.(type) {
+				case float64:
+					registryConfig.CacheTtlSeconds = int64(value)
+				case int64:
+					registryConfig.CacheTtlSeconds = value
+				}
+			}
+		}
+		return &registryConfig
+	} else {
+		return &RegistryConfig{Path: r.Registry.(string), CacheTtlSeconds: defaultCacheTtlSeconds}
+	}
+}
diff --git a/go/internal/feast/registry/repoconfig_test.go b/go/internal/feast/registry/repoconfig_test.go
new file mode 100644
index 0000000000..c3336fd618
--- /dev/null
+++ b/go/internal/feast/registry/repoconfig_test.go
@@ -0,0 +1,97 @@
+package registry
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestNewRepoConfig(t *testing.T) {
+	dir, err := os.MkdirTemp("", "feature_repo_*")
+	assert.Nil(t, err)
+	defer func() {
+		assert.Nil(t, os.RemoveAll(dir))
+	}()
+	filePath := filepath.Join(dir, "feature_store.yaml")
+	data := []byte(`
+project: feature_repo
+registry: "data/registry.db"
+provider: local
+online_store:
+  type: redis
+  connection_string: "localhost:6379"
+`)
+	err = os.WriteFile(filePath, data, 0666)
+	assert.Nil(t, err)
+	config, err := NewRepoConfigFromFile(dir)
+	assert.Nil(t, err)
+	assert.Equal(t, "feature_repo", config.Project)
+	assert.Equal(t, dir, config.RepoPath)
+	assert.Equal(t, "data/registry.db", config.GetRegistryConfig().Path)
+	assert.Equal(t, "local", config.Provider)
+	assert.Equal(t, map[string]interface{}{
+		"type":              "redis",
+		"connection_string": "localhost:6379",
+	}, config.OnlineStore)
+	assert.Empty(t, config.OfflineStore)
+	assert.Empty(t, config.FeatureServer)
+	assert.Empty(t, config.Flags)
+}
+
+func TestNewRepoConfigRegistryMap(t *testing.T) {
+	dir, err := os.MkdirTemp("", "feature_repo_*")
+	assert.Nil(t, err)
+	defer func() {
+		assert.Nil(t, os.RemoveAll(dir))
+	}()
+	filePath := filepath.Join(dir, "feature_store.yaml")
+	data := []byte(`
+registry:
+  path: data/registry.db
+project: feature_repo
+provider: local
+online_store:
+  type: redis
+  connection_string: "localhost:6379"
+`)
+	err = os.WriteFile(filePath, data, 0666)
+	assert.Nil(t, err)
+	config, err := NewRepoConfigFromFile(dir)
+	assert.Nil(t, err)
+	assert.Equal(t, "feature_repo", config.Project)
+	assert.Equal(t, dir, config.RepoPath)
+	assert.Equal(t, 
"data/registry.db", config.GetRegistryConfig().Path) + assert.Equal(t, "local", config.Provider) + assert.Equal(t, map[string]interface{}{ + "type": "redis", + "connection_string": "localhost:6379", + }, config.OnlineStore) + assert.Empty(t, config.OfflineStore) + assert.Empty(t, config.FeatureServer) + assert.Empty(t, config.Flags) +} + +func TestNewRepoConfigRegistryConfig(t *testing.T) { + dir, err := os.MkdirTemp("", "feature_repo_*") + assert.Nil(t, err) + defer func() { + assert.Nil(t, os.RemoveAll(dir)) + }() + filePath := filepath.Join(dir, "feature_store.yaml") + data := []byte(` +registry: + path: data/registry.db +project: feature_repo +provider: local +online_store: + type: redis + connection_string: "localhost:6379" +`) + err = os.WriteFile(filePath, data, 0666) + assert.Nil(t, err) + config, err := NewRepoConfigFromFile(dir) + assert.Nil(t, err) + assert.Equal(t, dir, config.RepoPath) + assert.Equal(t, "data/registry.db", config.GetRegistryConfig().Path) +} diff --git a/go/internal/feast/transformation/transformation.go b/go/internal/feast/transformation/transformation.go new file mode 100644 index 0000000000..319bed3b2c --- /dev/null +++ b/go/internal/feast/transformation/transformation.go @@ -0,0 +1,208 @@ +package transformation + +import ( + "errors" + "fmt" + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/cdata" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/internal/feast/onlineserving" + "github.com/feast-dev/feast/go/protos/feast/serving" + prototypes "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/feast-dev/feast/go/types" + "google.golang.org/protobuf/types/known/timestamppb" + "strings" + "unsafe" +) + +/* + TransformationCallback is a Python callback function's expected signature. + The function should accept name of the on demand feature view and pointers to input & output record batches. + Each record batch is being passed as two pointers: pointer to array (data) and pointer to schema. + Python function is expected to return number of rows added to the output record batch. 
+*/ +type TransformationCallback func(ODFVName string, inputArrPtr, inputSchemaPtr, outArrPtr, outSchemaPtr uintptr, fullFeatureNames bool) int + +func AugmentResponseWithOnDemandTransforms( + onDemandFeatureViews []*model.OnDemandFeatureView, + requestData map[string]*prototypes.RepeatedValue, + entityRows map[string]*prototypes.RepeatedValue, + features []*onlineserving.FeatureVector, + transformationCallback TransformationCallback, + arrowMemory memory.Allocator, + numRows int, + fullFeatureNames bool, + +) ([]*onlineserving.FeatureVector, error) { + result := make([]*onlineserving.FeatureVector, 0) + var err error + + for _, odfv := range onDemandFeatureViews { + requestContextArrow := make(map[string]array.Interface) + for name, values := range requestData { + requestContextArrow[name], err = types.ProtoValuesToArrowArray(values.Val, arrowMemory, numRows) + if err != nil { + return nil, err + } + } + + for name, values := range entityRows { + requestContextArrow[name], err = types.ProtoValuesToArrowArray(values.Val, arrowMemory, numRows) + if err != nil { + return nil, err + } + } + + retrievedFeatures := make(map[string]array.Interface) + for _, vector := range features { + retrievedFeatures[vector.Name] = vector.Values + } + + onDemandFeatures, err := CallTransformations( + odfv, + retrievedFeatures, + requestContextArrow, + transformationCallback, + numRows, + fullFeatureNames, + ) + if err != nil { + return nil, err + } + result = append(result, onDemandFeatures...) + } + + return result, nil +} + +func CallTransformations( + featureView *model.OnDemandFeatureView, + retrievedFeatures map[string]array.Interface, + requestContext map[string]array.Interface, + callback TransformationCallback, + numRows int, + fullFeatureNames bool, +) ([]*onlineserving.FeatureVector, error) { + + inputArr := cdata.CArrowArray{} + inputSchema := cdata.CArrowSchema{} + + outArr := cdata.CArrowArray{} + outSchema := cdata.CArrowSchema{} + + defer cdata.ReleaseCArrowArray(&inputArr) + defer cdata.ReleaseCArrowArray(&outArr) + defer cdata.ReleaseCArrowSchema(&inputSchema) + defer cdata.ReleaseCArrowSchema(&outSchema) + + inputArrPtr := uintptr(unsafe.Pointer(&inputArr)) + inputSchemaPtr := uintptr(unsafe.Pointer(&inputSchema)) + + outArrPtr := uintptr(unsafe.Pointer(&outArr)) + outSchemaPtr := uintptr(unsafe.Pointer(&outSchema)) + + inputFields := make([]arrow.Field, 0) + inputColumns := make([]array.Interface, 0) + for name, arr := range retrievedFeatures { + inputFields = append(inputFields, arrow.Field{Name: name, Type: arr.DataType()}) + inputColumns = append(inputColumns, arr) + } + for name, arr := range requestContext { + inputFields = append(inputFields, arrow.Field{Name: name, Type: arr.DataType()}) + inputColumns = append(inputColumns, arr) + } + + inputRecord := array.NewRecord(arrow.NewSchema(inputFields, nil), inputColumns, int64(numRows)) + defer inputRecord.Release() + + cdata.ExportArrowRecordBatch(inputRecord, &inputArr, &inputSchema) + + ret := callback(featureView.Base.Name, inputArrPtr, inputSchemaPtr, outArrPtr, outSchemaPtr, fullFeatureNames) + + if ret != numRows { + return nil, errors.New("python transformation callback failed") + } + + outRecord, err := cdata.ImportCRecordBatch(&outArr, &outSchema) + if err != nil { + return nil, err + } + + result := make([]*onlineserving.FeatureVector, 0) + for idx, field := range outRecord.Schema().Fields() { + dropFeature := true + + if featureView.Base.Projection != nil { + var featureName string + if fullFeatureNames { + featureName = 
strings.Split(field.Name, "__")[1]
+			} else {
+				featureName = field.Name
+			}
+
+			for _, feature := range featureView.Base.Projection.Features {
+				if featureName == feature.Name {
+					dropFeature = false
+				}
+			}
+		} else {
+			dropFeature = false
+		}
+
+		if dropFeature {
+			continue
+		}
+
+		statuses := make([]serving.FieldStatus, numRows)
+		timestamps := make([]*timestamppb.Timestamp, numRows)
+
+		// Mark every returned row as PRESENT, stamped with the current time.
+		for rowIdx := 0; rowIdx < numRows; rowIdx++ {
+			statuses[rowIdx] = serving.FieldStatus_PRESENT
+			timestamps[rowIdx] = timestamppb.Now()
+		}
+
+		result = append(result, &onlineserving.FeatureVector{
+			Name:       field.Name,
+			Values:     outRecord.Column(idx),
+			Statuses:   statuses,
+			Timestamps: timestamps,
+		})
+	}
+
+	return result, nil
+}
+
+func EnsureRequestedDataExist(requestedOnDemandFeatureViews []*model.OnDemandFeatureView,
+	requestDataFeatures map[string]*prototypes.RepeatedValue) error {
+	neededRequestData, err := getNeededRequestData(requestedOnDemandFeatureViews)
+	if err != nil {
+		return err
+	}
+	missingFeatures := make([]string, 0)
+	for feature := range neededRequestData {
+		if _, ok := requestDataFeatures[feature]; !ok {
+			missingFeatures = append(missingFeatures, feature)
+		}
+	}
+
+	if len(missingFeatures) > 0 {
+		return fmt.Errorf("requestDataNotFoundInEntityRowsException: %s", strings.Join(missingFeatures, ", "))
+	}
+	return nil
+}
+
+func getNeededRequestData(requestedOnDemandFeatureViews []*model.OnDemandFeatureView) (map[string]struct{}, error) {
+	neededRequestData := make(map[string]struct{})
+	for _, onDemandFeatureView := range requestedOnDemandFeatureViews {
+		requestSchema := onDemandFeatureView.GetRequestDataSchema()
+		for fieldName := range requestSchema {
+			neededRequestData[fieldName] = struct{}{}
+		}
+	}
+	return neededRequestData, nil
+}
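
For orientation, here is a minimal sketch of how the test helpers added below are typically combined. It assumes the demo repo layout produced by `feast init` (the parquet path and driver ids are illustrative), and since these helpers live under `internal/`, it can only run from within the Feast module itself:

```go
package test_example

import (
	"fmt"
	"log"

	"github.com/feast-dev/feast/go/internal/test"
)

func Example() {
	// Read the demo driver stats parquet file written by `feast init`, then
	// keep only the newest row per driver id: the values the online store is
	// expected to serve after materialization.
	rows, err := test.ReadParquet("feature_repo/data/driver_stats.parquet")
	if err != nil {
		log.Fatal(err)
	}
	latest := test.GetLatestFeatures(rows, map[int64]bool{1001: true, 1005: true})
	for id, row := range latest {
		fmt.Println(id, row.ConvRate, row.AccRate, row.AvgDailyTrips)
	}
}
```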
diff --git a/go/internal/test/go_integration_test_utils.go b/go/internal/test/go_integration_test_utils.go
new file mode 100644
index 0000000000..d66a546193
--- /dev/null
+++ b/go/internal/test/go_integration_test_utils.go
@@ -0,0 +1,253 @@
+package test
+
+import (
+	"context"
+	"fmt"
+	"log"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"time"
+
+	"github.com/apache/arrow/go/v8/arrow"
+	"github.com/apache/arrow/go/v8/arrow/array"
+	"github.com/apache/arrow/go/v8/arrow/memory"
+	"github.com/apache/arrow/go/v8/parquet/file"
+	"github.com/apache/arrow/go/v8/parquet/pqarrow"
+	"google.golang.org/protobuf/types/known/durationpb"
+	"google.golang.org/protobuf/types/known/timestamppb"
+
+	"github.com/feast-dev/feast/go/internal/feast/model"
+	"github.com/feast-dev/feast/go/protos/feast/types"
+	gotypes "github.com/feast-dev/feast/go/types"
+)
+
+type Row struct {
+	EventTimestamp int64
+	DriverId       int64
+	ConvRate       float32
+	AccRate        float32
+	AvgDailyTrips  int32
+	Created        int64
+}
+
+func ReadParquet(filePath string) ([]*Row, error) {
+	allocator := memory.NewGoAllocator()
+	pqfile, err := file.OpenParquetFile(filePath, false)
+	if err != nil {
+		return nil, err
+	}
+	reader, err := pqarrow.NewFileReader(pqfile, pqarrow.ArrowReadProperties{}, allocator)
+	if err != nil {
+		return nil, err
+	}
+	table, err := reader.ReadTable(context.Background())
+	if err != nil {
+		return nil, err
+	}
+
+	columns := make(map[string]arrow.Array)
+	fields := table.Schema().Fields()
+	for idx, field := range fields {
+		columns[field.Name] = table.Column(idx).Data().Chunk(0)
+	}
+
+	rows := make([]*Row, 0)
+	for rowIdx := 0; rowIdx < int(table.NumRows()); rowIdx++ {
+		rows = append(rows, &Row{
+			EventTimestamp: columns["event_timestamp"].(*array.Timestamp).Value(rowIdx).ToTime(arrow.Second).Unix(),
+			DriverId:       columns["driver_id"].(*array.Int64).Value(rowIdx),
+			ConvRate:       columns["conv_rate"].(*array.Float32).Value(rowIdx),
+			AccRate:        columns["acc_rate"].(*array.Float32).Value(rowIdx),
+			AvgDailyTrips:  columns["avg_daily_trips"].(*array.Int32).Value(rowIdx),
+			Created:        columns["created"].(*array.Timestamp).Value(rowIdx).ToTime(arrow.Second).Unix(),
+		})
+	}
+
+	return rows, nil
+}
+
+// GetLatestFeatures returns, for each requested entity, the row with the
+// greatest event timestamp.
+func GetLatestFeatures(Rows []*Row, entities map[int64]bool) map[int64]*Row {
+	correctFeatureRows := make(map[int64]*Row)
+	for _, Row := range Rows {
+		if _, ok := entities[Row.DriverId]; ok {
+			if _, ok := correctFeatureRows[Row.DriverId]; ok {
+				if Row.EventTimestamp > correctFeatureRows[Row.DriverId].EventTimestamp {
+					correctFeatureRows[Row.DriverId] = Row
+				}
+			} else {
+				correctFeatureRows[Row.DriverId] = Row
+			}
+		}
+	}
+	return correctFeatureRows
+}
+
+func SetupCleanFeatureRepo(basePath string) error {
+	path, err := filepath.Abs(basePath)
+	if err != nil {
+		return err
+	}
+	cmd := exec.Command("feast", "init", "feature_repo")
+	cmd.Env = os.Environ()
+	cmd.Dir = path
+	err = cmd.Run()
+	if err != nil {
+		return err
+	}
+	applyCommand := exec.Command("feast", "apply")
+	applyCommand.Env = os.Environ()
+	featureRepoPath, err := filepath.Abs(filepath.Join(path, "feature_repo"))
+	if err != nil {
+		return err
+	}
+	applyCommand.Dir = featureRepoPath
+	if err = applyCommand.Run(); err != nil {
+		return err
+	}
+	t := time.Now()
+
+	formattedTime := fmt.Sprintf("%d-%02d-%02dT%02d:%02d:%02d",
+		t.Year(), t.Month(), t.Day(),
+		t.Hour(), t.Minute(), t.Second())
+	materializeCommand := exec.Command("feast", "materialize-incremental", formattedTime)
+	materializeCommand.Env = os.Environ()
+	materializeCommand.Dir = featureRepoPath
+	err = materializeCommand.Run()
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func SetupInitializedRepo(basePath string) error {
+	path, err := filepath.Abs(basePath)
+	if err != nil {
+		return err
+	}
+	applyCommand := exec.Command("feast", "apply")
+	applyCommand.Env = os.Environ()
+	featureRepoPath, err := filepath.Abs(filepath.Join(path, "feature_repo"))
+	if err != nil {
+		return err
+	}
+	applyCommand.Dir = featureRepoPath
+	err = applyCommand.Run()
+	if err != nil {
+		return err
+	}
+	t := time.Now()
+
+	formattedTime := fmt.Sprintf("%d-%02d-%02dT%02d:%02d:%02d",
+		t.Year(), t.Month(), t.Day(),
+		t.Hour(), t.Minute(), t.Second())
+
+	materializeCommand := exec.Command("feast", "materialize-incremental", formattedTime)
+	materializeCommand.Env = os.Environ()
+	materializeCommand.Dir = featureRepoPath
+	out, err := materializeCommand.Output()
+	if err != nil {
+		log.Println(string(out))
+		return err
+	}
+	return nil
+}
+
+func CleanUpInitializedRepo(basePath string) {
+	featureRepoPath, err := filepath.Abs(filepath.Join(basePath, "feature_repo"))
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	err = os.Remove(filepath.Join(featureRepoPath, "data", "registry.db"))
+	if err != nil {
+		log.Fatal(err)
+	}
+	err = os.Remove(filepath.Join(featureRepoPath, "data", "online_store.db"))
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+func CleanUpRepo(basePath string) {
+	featureRepoPath, err := filepath.Abs(filepath.Join(basePath, "feature_repo"))
+	if err != nil {
+		log.Fatal(err)
+	}
+	err = os.RemoveAll(featureRepoPath)
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+func GetProtoFromRecord(rec array.Record) (map[string]*types.RepeatedValue, error) {
+	r := 
make(map[string]*types.RepeatedValue) + schema := rec.Schema() + for idx, column := range rec.Columns() { + field := schema.Field(idx) + values, err := gotypes.ArrowValuesToProtoValues(column) + if err != nil { + return nil, err + } + r[field.Name] = &types.RepeatedValue{Val: values} + } + return r, nil +} + +func CleanUpFile(absPath string) error { + return os.Remove(absPath) +} + +func CreateBaseFeatureView(name string, features []*model.Feature, projection *model.FeatureViewProjection) *model.BaseFeatureView { + return &model.BaseFeatureView{ + Name: name, + Features: features, + Projection: projection, + } +} + +func CreateNewEntity(name string, valueType types.ValueType_Enum, joinKey string) *model.Entity { + return &model.Entity{ + Name: name, + ValueType: valueType, + JoinKey: joinKey, + } +} + +func CreateNewFeature(name string, dtype types.ValueType_Enum) *model.Feature { + return &model.Feature{Name: name, + Dtype: dtype, + } +} + +func CreateNewFeatureService(name string, project string, createdTimestamp *timestamppb.Timestamp, lastUpdatedTimestamp *timestamppb.Timestamp, projections []*model.FeatureViewProjection) *model.FeatureService { + return &model.FeatureService{ + Name: name, + Project: project, + CreatedTimestamp: createdTimestamp, + LastUpdatedTimestamp: lastUpdatedTimestamp, + Projections: projections, + } +} + +func CreateNewFeatureViewProjection(name string, nameAlias string, features []*model.Feature, joinKeyMap map[string]string) *model.FeatureViewProjection { + return &model.FeatureViewProjection{Name: name, + NameAlias: nameAlias, + Features: features, + JoinKeyMap: joinKeyMap, + } +} + +func CreateFeatureView(base *model.BaseFeatureView, ttl *durationpb.Duration, entities []string) *model.FeatureView { + return &model.FeatureView{ + Base: base, + Ttl: ttl, + Entities: entities, + } +} diff --git a/go/types/typeconversion.go b/go/types/typeconversion.go new file mode 100644 index 0000000000..416eb2ac27 --- /dev/null +++ b/go/types/typeconversion.go @@ -0,0 +1,319 @@ +package types + +import ( + "fmt" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +func ProtoTypeToArrowType(sample *types.Value) (arrow.DataType, error) { + switch sample.Val.(type) { + case *types.Value_BytesVal: + return arrow.BinaryTypes.Binary, nil + case *types.Value_StringVal: + return arrow.BinaryTypes.String, nil + case *types.Value_Int32Val: + return arrow.PrimitiveTypes.Int32, nil + case *types.Value_Int64Val: + return arrow.PrimitiveTypes.Int64, nil + case *types.Value_FloatVal: + return arrow.PrimitiveTypes.Float32, nil + case *types.Value_DoubleVal: + return arrow.PrimitiveTypes.Float64, nil + case *types.Value_BoolVal: + return arrow.FixedWidthTypes.Boolean, nil + case *types.Value_BoolListVal: + return arrow.ListOf(arrow.FixedWidthTypes.Boolean), nil + case *types.Value_StringListVal: + return arrow.ListOf(arrow.BinaryTypes.String), nil + case *types.Value_BytesListVal: + return arrow.ListOf(arrow.BinaryTypes.Binary), nil + case *types.Value_Int32ListVal: + return arrow.ListOf(arrow.PrimitiveTypes.Int32), nil + case *types.Value_Int64ListVal: + return arrow.ListOf(arrow.PrimitiveTypes.Int64), nil + case *types.Value_FloatListVal: + return arrow.ListOf(arrow.PrimitiveTypes.Float32), nil + case *types.Value_DoubleListVal: + return arrow.ListOf(arrow.PrimitiveTypes.Float64), nil + case *types.Value_UnixTimestampVal: + return 
arrow.FixedWidthTypes.Timestamp_s, nil + case *types.Value_UnixTimestampListVal: + return arrow.ListOf(arrow.FixedWidthTypes.Timestamp_s), nil + default: + return nil, + fmt.Errorf("unsupported proto type in proto to arrow conversion: %s", sample.Val) + } +} + +func ValueTypeEnumToArrowType(t types.ValueType_Enum) (arrow.DataType, error) { + switch t { + case types.ValueType_BYTES: + return arrow.BinaryTypes.Binary, nil + case types.ValueType_STRING: + return arrow.BinaryTypes.String, nil + case types.ValueType_INT32: + return arrow.PrimitiveTypes.Int32, nil + case types.ValueType_INT64: + return arrow.PrimitiveTypes.Int64, nil + case types.ValueType_FLOAT: + return arrow.PrimitiveTypes.Float32, nil + case types.ValueType_DOUBLE: + return arrow.PrimitiveTypes.Float64, nil + case types.ValueType_BOOL: + return arrow.FixedWidthTypes.Boolean, nil + case types.ValueType_BOOL_LIST: + return arrow.ListOf(arrow.FixedWidthTypes.Boolean), nil + case types.ValueType_STRING_LIST: + return arrow.ListOf(arrow.BinaryTypes.String), nil + case types.ValueType_BYTES_LIST: + return arrow.ListOf(arrow.BinaryTypes.Binary), nil + case types.ValueType_INT32_LIST: + return arrow.ListOf(arrow.PrimitiveTypes.Int32), nil + case types.ValueType_INT64_LIST: + return arrow.ListOf(arrow.PrimitiveTypes.Int64), nil + case types.ValueType_FLOAT_LIST: + return arrow.ListOf(arrow.PrimitiveTypes.Float32), nil + case types.ValueType_DOUBLE_LIST: + return arrow.ListOf(arrow.PrimitiveTypes.Float64), nil + case types.ValueType_UNIX_TIMESTAMP: + return arrow.FixedWidthTypes.Timestamp_s, nil + case types.ValueType_UNIX_TIMESTAMP_LIST: + return arrow.ListOf(arrow.FixedWidthTypes.Timestamp_s), nil + default: + return nil, + fmt.Errorf("unsupported value type enum in enum to arrow type conversion: %s", t) + } +} + +func copyProtoValuesToArrowArray(builder array.Builder, values []*types.Value) error { + switch fieldBuilder := builder.(type) { + case *array.BooleanBuilder: + for _, v := range values { + fieldBuilder.Append(v.GetBoolVal()) + } + case *array.BinaryBuilder: + for _, v := range values { + fieldBuilder.Append(v.GetBytesVal()) + } + case *array.StringBuilder: + for _, v := range values { + fieldBuilder.Append(v.GetStringVal()) + } + case *array.Int32Builder: + for _, v := range values { + fieldBuilder.Append(v.GetInt32Val()) + } + case *array.Int64Builder: + for _, v := range values { + fieldBuilder.Append(v.GetInt64Val()) + } + case *array.Float32Builder: + for _, v := range values { + fieldBuilder.Append(v.GetFloatVal()) + } + case *array.Float64Builder: + for _, v := range values { + fieldBuilder.Append(v.GetDoubleVal()) + } + case *array.TimestampBuilder: + for _, v := range values { + fieldBuilder.Append(arrow.Timestamp(v.GetUnixTimestampVal())) + } + case *array.ListBuilder: + for _, list := range values { + fieldBuilder.Append(true) + + switch valueBuilder := fieldBuilder.ValueBuilder().(type) { + + case *array.BooleanBuilder: + for _, v := range list.GetBoolListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.BinaryBuilder: + for _, v := range list.GetBytesListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.StringBuilder: + for _, v := range list.GetStringListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.Int32Builder: + for _, v := range list.GetInt32ListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.Int64Builder: + for _, v := range list.GetInt64ListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.Float32Builder: + for _, v := range 
list.GetFloatListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.Float64Builder: + for _, v := range list.GetDoubleListVal().GetVal() { + valueBuilder.Append(v) + } + case *array.TimestampBuilder: + for _, v := range list.GetUnixTimestampListVal().GetVal() { + valueBuilder.Append(arrow.Timestamp(v)) + } + } + } + default: + return fmt.Errorf("unsupported array builder: %s", builder) + } + return nil +} + +func ArrowValuesToProtoValues(arr arrow.Array) ([]*types.Value, error) { + values := make([]*types.Value, 0) + + if listArr, ok := arr.(*array.List); ok { + listValues := listArr.ListValues() + offsets := listArr.Offsets()[1:] + pos := 0 + for idx := 0; idx < listArr.Len(); idx++ { + switch listValues.DataType() { + case arrow.PrimitiveTypes.Int32: + vals := make([]int32, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Int32).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_Int32ListVal{Int32ListVal: &types.Int32List{Val: vals}}}) + case arrow.PrimitiveTypes.Int64: + vals := make([]int64, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Int64).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_Int64ListVal{Int64ListVal: &types.Int64List{Val: vals}}}) + case arrow.PrimitiveTypes.Float32: + vals := make([]float32, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Float32).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_FloatListVal{FloatListVal: &types.FloatList{Val: vals}}}) + case arrow.PrimitiveTypes.Float64: + vals := make([]float64, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Float64).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_DoubleListVal{DoubleListVal: &types.DoubleList{Val: vals}}}) + case arrow.BinaryTypes.Binary: + vals := make([][]byte, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Binary).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_BytesListVal{BytesListVal: &types.BytesList{Val: vals}}}) + case arrow.BinaryTypes.String: + vals := make([]string, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.String).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_StringListVal{StringListVal: &types.StringList{Val: vals}}}) + case arrow.FixedWidthTypes.Boolean: + vals := make([]bool, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = listValues.(*array.Boolean).Value(j) + } + values = append(values, + &types.Value{Val: &types.Value_BoolListVal{BoolListVal: &types.BoolList{Val: vals}}}) + case arrow.FixedWidthTypes.Timestamp_s: + vals := make([]int64, int(offsets[idx])-pos) + for j := pos; j < int(offsets[idx]); j++ { + vals[j-pos] = int64(listValues.(*array.Timestamp).Value(j)) + } + + values = append(values, + &types.Value{Val: &types.Value_UnixTimestampListVal{ + UnixTimestampListVal: &types.Int64List{Val: vals}}}) + + } + + // set the end of current element as start of the next + pos = int(offsets[idx]) + } + + return values, nil + } + + switch arr.DataType() { + case arrow.PrimitiveTypes.Int32: + for _, v := range arr.(*array.Int32).Int32Values() { + values = append(values, &types.Value{Val: &types.Value_Int32Val{Int32Val: v}}) + } + case arrow.PrimitiveTypes.Int64: + for 
_, v := range arr.(*array.Int64).Int64Values() { + values = append(values, &types.Value{Val: &types.Value_Int64Val{Int64Val: v}}) + } + case arrow.PrimitiveTypes.Float32: + for _, v := range arr.(*array.Float32).Float32Values() { + values = append(values, &types.Value{Val: &types.Value_FloatVal{FloatVal: v}}) + } + case arrow.PrimitiveTypes.Float64: + for _, v := range arr.(*array.Float64).Float64Values() { + values = append(values, &types.Value{Val: &types.Value_DoubleVal{DoubleVal: v}}) + } + case arrow.FixedWidthTypes.Boolean: + for idx := 0; idx < arr.Len(); idx++ { + values = append(values, + &types.Value{Val: &types.Value_BoolVal{BoolVal: arr.(*array.Boolean).Value(idx)}}) + } + case arrow.BinaryTypes.Binary: + for idx := 0; idx < arr.Len(); idx++ { + values = append(values, + &types.Value{Val: &types.Value_BytesVal{BytesVal: arr.(*array.Binary).Value(idx)}}) + } + case arrow.BinaryTypes.String: + for idx := 0; idx < arr.Len(); idx++ { + values = append(values, + &types.Value{Val: &types.Value_StringVal{StringVal: arr.(*array.String).Value(idx)}}) + } + case arrow.FixedWidthTypes.Timestamp_s: + for idx := 0; idx < arr.Len(); idx++ { + values = append(values, + &types.Value{Val: &types.Value_UnixTimestampVal{ + UnixTimestampVal: int64(arr.(*array.Timestamp).Value(idx))}}) + } + default: + return nil, fmt.Errorf("unsupported arrow to proto conversion for type %s", arr.DataType()) + } + + return values, nil +} + +func ProtoValuesToArrowArray(protoValues []*types.Value, arrowAllocator memory.Allocator, numRows int) (arrow.Array, error) { + var fieldType arrow.DataType + var err error + + for _, val := range protoValues { + if val != nil { + fieldType, err = ProtoTypeToArrowType(val) + if err != nil { + return nil, err + } + break + } + } + + if fieldType != nil { + builder := array.NewBuilder(arrowAllocator, fieldType) + err = copyProtoValuesToArrowArray(builder, protoValues) + if err != nil { + return nil, err + } + + return builder.NewArray(), nil + } else { + return array.NewNull(numRows), nil + } +} diff --git a/go/types/typeconversion_test.go b/go/types/typeconversion_test.go new file mode 100644 index 0000000000..05fc32f63a --- /dev/null +++ b/go/types/typeconversion_test.go @@ -0,0 +1,80 @@ +package types + +import ( + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/protos/feast/types" + "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/assert" + "testing" + "time" +) + +var ( + PROTO_VALUES = [][]*types.Value{ + {{Val: &types.Value_Int32Val{10}}, {Val: &types.Value_Int32Val{20}}}, + {{Val: &types.Value_Int64Val{10}}, {Val: &types.Value_Int64Val{20}}}, + {{Val: &types.Value_FloatVal{1.0}}, {Val: &types.Value_FloatVal{2.0}}}, + {{Val: &types.Value_DoubleVal{1.0}}, {Val: &types.Value_DoubleVal{2.0}}}, + {{Val: &types.Value_StringVal{"aaa"}}, {Val: &types.Value_StringVal{"bbb"}}}, + {{Val: &types.Value_BytesVal{[]byte{1, 2, 3}}}, {Val: &types.Value_BytesVal{[]byte{4, 5, 6}}}}, + {{Val: &types.Value_BoolVal{true}}, {Val: &types.Value_BoolVal{false}}}, + {{Val: &types.Value_UnixTimestampVal{time.Now().Unix()}}, + {Val: &types.Value_UnixTimestampVal{time.Now().Unix()}}}, + + { + {Val: &types.Value_Int32ListVal{&types.Int32List{Val: []int32{0, 1, 2}}}}, + {Val: &types.Value_Int32ListVal{&types.Int32List{Val: []int32{3, 4, 5}}}}, + }, + { + {Val: &types.Value_Int64ListVal{&types.Int64List{Val: []int64{0, 1, 2}}}}, + {Val: &types.Value_Int64ListVal{&types.Int64List{Val: []int64{3, 4, 5}}}}, + }, + { + {Val: 
&types.Value_FloatListVal{&types.FloatList{Val: []float32{0.5, 1.5, 2}}}}, + {Val: &types.Value_FloatListVal{&types.FloatList{Val: []float32{3.5, 4, 5}}}}, + }, + { + {Val: &types.Value_DoubleListVal{&types.DoubleList{Val: []float64{0.5, 1, 2}}}}, + {Val: &types.Value_DoubleListVal{&types.DoubleList{Val: []float64{3.5, 4, 5}}}}, + }, + { + {Val: &types.Value_BytesListVal{&types.BytesList{Val: [][]byte{{0, 1}, {2}}}}}, + {Val: &types.Value_BytesListVal{&types.BytesList{Val: [][]byte{{3, 4}, {5}}}}}, + }, + { + {Val: &types.Value_StringListVal{&types.StringList{Val: []string{"aa", "bb"}}}}, + {Val: &types.Value_StringListVal{&types.StringList{Val: []string{"cc", "dd"}}}}, + }, + { + {Val: &types.Value_BoolListVal{&types.BoolList{Val: []bool{false, false}}}}, + {Val: &types.Value_BoolListVal{&types.BoolList{Val: []bool{true, true}}}}, + }, + { + {Val: &types.Value_UnixTimestampListVal{&types.Int64List{Val: []int64{time.Now().Unix()}}}}, + {Val: &types.Value_UnixTimestampListVal{&types.Int64List{Val: []int64{time.Now().Unix()}}}}, + }, + } +) + +func TestConversionBetweenProtoAndArrow(t *testing.T) { + pool := memory.NewGoAllocator() + for _, vector := range PROTO_VALUES { + arrowArray, err := ProtoValuesToArrowArray(vector, pool, len(vector)) + assert.Nil(t, err) + + protoValues, err := ArrowValuesToProtoValues(arrowArray) + assert.Nil(t, err) + + protoValuesEquals(t, vector, protoValues) + } + +} + +func protoValuesEquals(t *testing.T, a, b []*types.Value) { + assert.Equal(t, len(a), len(b)) + + for idx, left := range a { + assert.Truef(t, proto.Equal(left, b[idx]), + "Arrays are not equal. Diff[%d] %v != %v", idx, left, b[idx]) + } +} diff --git a/infra/charts/README.md b/infra/charts/README.md deleted file mode 100644 index 751bd4f483..0000000000 --- a/infra/charts/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Feast Helm Charts - -Feast Helm Charts have been moved out of this repository. - -* The master chart can now be found in at: https://github.com/feast-dev/feast-helm-charts -* Feast Serving & Core Charts are at: https://github.com/feast-dev/feast-java/tree/master/infra -* Feast Spark (Job Service) is at: https://github.com/feast-dev/feast-spark/tree/master/infra/charts/feast-spark -* Feast Jupyter server is at: https://github.com/feast-dev/feast-helm-charts/blob/main/charts/feast-jupyter/README.md diff --git a/infra/charts/feast-python-server/.helmignore b/infra/charts/feast-python-server/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/infra/charts/feast-python-server/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/infra/charts/feast-python-server/Chart.yaml b/infra/charts/feast-python-server/Chart.yaml new file mode 100644 index 0000000000..d0e4ef92cf --- /dev/null +++ b/infra/charts/feast-python-server/Chart.yaml @@ -0,0 +1,12 @@ +apiVersion: v2 +name: feast-python-server +description: Feast Feature Server in Python +type: application +version: 0.20.2 +keywords: + - machine learning + - big data + - mlops +home: https://feast.dev/ +sources: + - https://github.com/feast-dev/feast diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md new file mode 100644 index 0000000000..a650ff2c18 --- /dev/null +++ b/infra/charts/feast-python-server/README.md @@ -0,0 +1,59 @@ +# feast-python-server + +![Version: 0.20.2](https://img.shields.io/badge/Version-0.20.2-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) + +Feast Feature Server in Python + +**Homepage:** + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | | +| fullnameOverride | string | `""` | | +| image.pullPolicy | string | `"IfNotPresent"` | | +| image.repository | string | `""` | [required] The repository for the Docker image | +| image.tag | string | `""` | [required] The Docker image tag | +| imagePullSecrets | list | `[]` | | +| livenessProbe.initialDelaySeconds | int | `30` | | +| livenessProbe.periodSeconds | int | `30` | | +| nameOverride | string | `""` | | +| nodeSelector | object | `{}` | | +| podAnnotations | object | `{}` | | +| podSecurityContext | object | `{}` | | +| readinessProbe.initialDelaySeconds | int | `20` | | +| readinessProbe.periodSeconds | int | `10` | | +| replicaCount | int | `1` | | +| resources | object | `{}` | | +| securityContext | object | `{}` | | +| service.port | int | `80` | | +| service.type | string | `"ClusterIP"` | | +| tolerations | list | `[]` | | + +---------------------------------------------- +Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0) + + +Docker repository and tag are required. Helm install example: +``` +helm install feast-python-server . --set image.repository=REPO --set image.tag=TAG +``` + +Deployment assumes that `feature_store.yaml` exists on docker image. Example docker image: +``` +FROM python:3.7 + +RUN apt update && \ + apt install -y jq + +RUN pip install pip --upgrade + +RUN pip install feast + +COPY feature_store.yaml /feature_store.yaml +``` \ No newline at end of file diff --git a/infra/charts/feast-python-server/templates/_helpers.tpl b/infra/charts/feast-python-server/templates/_helpers.tpl new file mode 100644 index 0000000000..b64e10536d --- /dev/null +++ b/infra/charts/feast-python-server/templates/_helpers.tpl @@ -0,0 +1,52 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "feast-python-server.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
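+E.g. a release named "myrelease" with this chart yields
+"myrelease-feast-python-server", while a release named
+"test-feast-python-server" already contains the chart name and is used as-is
+(illustrative release names).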
+*/}} +{{- define "feast-python-server.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "feast-python-server.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "feast-python-server.labels" -}} +helm.sh/chart: {{ include "feast-python-server.chart" . }} +{{ include "feast-python-server.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "feast-python-server.selectorLabels" -}} +app.kubernetes.io/name: {{ include "feast-python-server.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/infra/charts/feast-python-server/templates/deployment.yaml b/infra/charts/feast-python-server/templates/deployment.yaml new file mode 100644 index 0000000000..56439be658 --- /dev/null +++ b/infra/charts/feast-python-server/templates/deployment.yaml @@ -0,0 +1,61 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "feast-python-server.fullname" . }} + labels: + {{- include "feast-python-server.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "feast-python-server.selectorLabels" . | nindent 6 }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "feast-python-server.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ .Chart.Name }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + command: ["feast", "serve", "-h", "0.0.0.0"] + ports: + - name: http + containerPort: 6566 + protocol: TCP + livenessProbe: + tcpSocket: + port: http + initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.livenessProbe.periodSeconds }} + readinessProbe: + tcpSocket: + port: http + initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.readinessProbe.periodSeconds }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} diff --git a/infra/charts/feast-python-server/templates/service.yaml b/infra/charts/feast-python-server/templates/service.yaml new file mode 100644 index 0000000000..86bf38a9a4 --- /dev/null +++ b/infra/charts/feast-python-server/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "feast-python-server.name" . }} + labels: + {{- include "feast-python-server.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "feast-python-server.selectorLabels" . | nindent 4 }} diff --git a/infra/charts/feast-python-server/values.yaml b/infra/charts/feast-python-server/values.yaml new file mode 100644 index 0000000000..f62f95a757 --- /dev/null +++ b/infra/charts/feast-python-server/values.yaml @@ -0,0 +1,57 @@ +# Default values for feast. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +replicaCount: 1 + +image: + repository: "" + pullPolicy: IfNotPresent + tag: "" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +podAnnotations: {} + +podSecurityContext: {} + # fsGroup: 2000 + +securityContext: {} + # capabilities: + # drop: + # - ALL + # readOnlyRootFilesystem: true + # runAsNonRoot: true + # runAsUser: 1000 + +service: + type: ClusterIP + port: 80 + +resources: {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +nodeSelector: {} + +tolerations: [] + +affinity: {} + +livenessProbe: + initialDelaySeconds: 30 + periodSeconds: 30 + +readinessProbe: + initialDelaySeconds: 20 + periodSeconds: 10 diff --git a/infra/charts/feast/.helmignore b/infra/charts/feast/.helmignore new file mode 100644 index 0000000000..50af031725 --- /dev/null +++ b/infra/charts/feast/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml new file mode 100644 index 0000000000..8fa8a2101e --- /dev/null +++ b/infra/charts/feast/Chart.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +description: Feature store for machine learning +name: feast +version: 0.20.2 +keywords: + - machine learning + - big data + - mlops +home: https://feast.dev/ +sources: + - https://github.com/feast-dev/feast \ No newline at end of file diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md new file mode 100644 index 0000000000..99dc9865bd --- /dev/null +++ b/infra/charts/feast/README.md @@ -0,0 +1,72 @@ +# Feast Helm Charts + +This repo contains Helm charts for Feast components that are being installed on Kubernetes: +* Feast (root chart): The complete Helm chart containing all Feast components and dependencies. 
diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md
new file mode 100644
index 0000000000..99dc9865bd
--- /dev/null
+++ b/infra/charts/feast/README.md
@@ -0,0 +1,72 @@
+# Feast Helm Charts
+
+This repo contains Helm charts for Feast components that are installed on Kubernetes:
+* Feast (root chart): The complete Helm chart containing all Feast components and dependencies. Most users will use this chart, but subcharts can be selectively enabled or disabled via the values.yaml file.
+  * [Feature Server](charts/feature-server): A high-performance, JVM-based implementation of the feature server.
+  * [Transformation Service](charts/transformation-service): A transformation server for computing on-demand features.
+  * Redis: (Optional) One possible online store backend for the Feature Server.
+
+## Chart: Feast
+
+Feature store for machine learning. The current chart version is `0.20.2`.
+
+## Installation
+
+Charts are published to `https://feast-helm-charts.storage.googleapis.com`. Please note that this URL is different from the URL we previously used (`feast-charts`).
+
+Run the following commands to add the repository:
+
+```
+helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
+helm repo update
+```
+
+Install Feast:
+```
+helm install feast-release feast-charts/feast
+```
+
+## Customize your installation
+
+This Feast chart comes with a [values.yaml](values.yaml) that allows for configuration and customization of all sub-charts.
+
+To modify the default configuration of the Feature Server, use the `application-override.yaml` key in this chart's `values.yaml`. For example:
+```
+feature-server:
+  application-override.yaml:
+    enabled: true
+    feast:
+      active_store: online
+      stores:
+      - name: online
+        type: REDIS
+        config:
+          host: localhost
+          port: 6379
+
+```
+
+For the default configuration, please see the [Feature Server Configuration](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml).
+
+For more details, please see: https://docs.feast.dev/how-to-guides/running-feast-in-production
+
+## Requirements
+
+| Repository | Name | Version |
+|------------|------|---------|
+| https://charts.helm.sh/stable | redis | 10.5.6 |
+| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.20.2 |
+| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.20.2 |
+
+## Values
+
+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| feature-server.enabled | bool | `true` |  |
+| global.project | string | `"default"` | Project from feature_store.yaml |
+| global.registry | object | `{"cache_ttl_seconds":0,"path":"gs://path/to/registry.db"}` | Information about registry managed by Feast Python SDK (must be in sync with feature_store.yaml) |
+| global.registry.cache_ttl_seconds | int | `0` | Registry cache (in memory) will be refreshed on this interval |
+| global.registry.path | string | `"gs://path/to/registry.db"` | Path to the registry file managed by Feast Python SDK |
+| redis.enabled | bool | `false` | Flag to install Redis |
+| redis.usePassword | bool | `false` | Disable redis password |
+| transformation-service.enabled | bool | `true` |  |
\ No newline at end of file
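A hedged, end-to-end sketch of the override flow described in the README above (untested; the Redis host is illustrative, and the registry path and project shown are the chart's documented placeholder defaults):

```bash
# Write the override values from the README snippet to a local file.
cat > feast-values.yaml <<'EOF'
feature-server:
  application-override.yaml:
    enabled: true
    feast:
      active_store: online
      stores:
        - name: online
          type: REDIS
          config:
            host: localhost
            port: 6379
EOF

# Install (or upgrade) the umbrella chart with the override applied, keeping
# global.registry.path and global.project in sync with feature_store.yaml.
helm upgrade --install feast-release feast-charts/feast \
  --values feast-values.yaml \
  --set global.registry.path=gs://path/to/registry.db \
  --set global.project=default
```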
diff --git a/infra/charts/feast/README.md.gotmpl b/infra/charts/feast/README.md.gotmpl
new file mode 100644
index 0000000000..acb4e830e7
--- /dev/null
+++ b/infra/charts/feast/README.md.gotmpl
@@ -0,0 +1,57 @@
+# Feast Helm Charts
+
+> :warning: **Disclaimer**: Since Feast 0.10, our vision has been to manage all feature store infrastructure from one place: the Feast SDK. While this new paradigm is still in development, we plan to support the installation of some Feast components (such as the Java feature server) through the Helm chart in this repository. However, we do not expect this Helm chart to become a long-term solution for deploying Feast components to production, and some friction may still exist. For example, you will need to manually sync some configuration from [feature_store.yaml](https://docs.feast.dev/reference/feature-repository/feature-store-yaml) into the chart context (such as the path to the registry file or the project name).
+
+This repo contains Helm charts for Feast components that are installed on Kubernetes:
+* Feast (root chart): The complete Helm chart containing all Feast components and dependencies. Most users will use this chart, but subcharts can be selectively enabled or disabled via the values.yaml file.
+  * [Feature Server](charts/feature-server): A high-performance, JVM-based implementation of the feature server.
+  * [Transformation Service](charts/transformation-service): A transformation server for computing on-demand features.
+  * Redis: (Optional) One possible online store backend for the Feature Server.
+
+## Chart: Feast
+
+{{ template "chart.description" . }}. The current chart version is `{{ template "chart.version" . }}`.
+
+## Installation
+
+Charts are published to `https://feast-helm-charts.storage.googleapis.com`. Please note that this URL is different from the URL we previously used (`feast-charts`).
+
+Run the following commands to add the repository:
+
+```
+helm repo add feast-charts https://feast-helm-charts.storage.googleapis.com
+helm repo update
+```
+
+Install Feast:
+```
+helm install feast-release feast-charts/feast
+```
+
+## Customize your installation
+
+This Feast chart comes with a [values.yaml](values.yaml) that allows for configuration and customization of all sub-charts.
+
+To modify the default configuration of the Feature Server, use the `application-override.yaml` key in this chart's `values.yaml`. For example:
+```
+feature-server:
+  application-override.yaml:
+    enabled: true
+    feast:
+      active_store: online
+      stores:
+      - name: online
+        type: REDIS
+        config:
+          host: localhost
+          port: 6379
+
+```
+
+For the default configuration, please see the [Feature Server Configuration](https://github.com/feast-dev/feast-java/blob/master/serving/src/main/resources/application.yml).
+
+For more details, please see: https://docs.feast.dev/how-to-guides/running-feast-in-production
+
+{{ template "chart.requirementsSection" . }}
+
+{{ template "chart.valuesSection" . 
}} \ No newline at end of file diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml new file mode 100644 index 0000000000..a0c208b9a7 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +description: "Feast Feature Server: Online feature serving service for Feast" +name: feature-server +version: 0.20.2 +appVersion: v0.20.2 +keywords: +- machine learning +- big data +- mlops +home: https://github.com/feast-dev/feast \ No newline at end of file diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md new file mode 100644 index 0000000000..34415ee20f --- /dev/null +++ b/infra/charts/feast/charts/feature-server/README.md @@ -0,0 +1,70 @@ +# feature-server + +![Version: 0.20.2](https://img.shields.io/badge/Version-0.20.2-informational?style=flat-square) ![AppVersion: v0.20.2](https://img.shields.io/badge/AppVersion-v0.20.2-informational?style=flat-square) + +Feast Feature Server: Online feature serving service for Feast + +**Homepage:** + +## Values + +| Key | Type | Default | Description | +|-----|------|-------------------------------------------------------|-------------| +| "application-generated.yaml".enabled | bool | `true` | Flag to include Helm generated configuration. Please set `application-override.yaml` to override this configuration. | +| "application-override.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). Will be created as a ConfigMap. `application-override.yaml` has a higher precedence than `application-secret.yaml` | +| "application-secret.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). Will be created as a Secret. `application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. | +| "application.yaml".enabled | bool | `true` | Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. 
| +| envOverrides | object | `{}` | Extra environment variables to set | +| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | +| image.tag | string | `"0.20.2"` | Image tag | +| ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | +| ingress.grpc.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.grpc.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.grpc.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.grpc.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.grpc.whitelist | string | `""` | Allowed client IP source ranges | +| ingress.http.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.http.auth.authUrl | string | `"http://auth-server.auth-ns.svc.cluster.local/auth"` | URL to an existing authentication service | +| ingress.http.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.http.class | string | `"nginx"` | Which ingress controller to use | +| ingress.http.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.http.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.http.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.http.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.http.whitelist | string | `""` | Allowed client IP source ranges | +| javaOpts | string | `nil` | [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m` |
+| livenessProbe.enabled | bool | `true` | Flag to enable the probe |
+| livenessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed |
+| livenessProbe.initialDelaySeconds | int | `60` | Delay before the probe is initiated |
+| livenessProbe.periodSeconds | int | `10` | How often to perform the probe |
+| livenessProbe.successThreshold | int | `1` | Min consecutive successes for the probe to be considered successful |
+| livenessProbe.timeoutSeconds | int | `5` | Number of seconds after which the probe times out |
+| logLevel | string | `"WARN"` | Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR` |
+| logType | string | `"Console"` | Log format, either `JSON` or `Console` |
+| nodeSelector | object | `{}` | Node labels for pod assignment |
+| podAnnotations | object | `{}` | Annotations to be added to Feast Serving pods |
+| podLabels | object | `{}` | Labels to be added to Feast Serving pods |
+| readinessProbe.enabled | bool | `true` | Flag to enable the probe |
+| readinessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed |
+| readinessProbe.initialDelaySeconds | int | `15` | Delay before the probe is initiated |
+| readinessProbe.periodSeconds | int | `10` | How often to perform the probe |
+| readinessProbe.successThreshold | int | `1` | Min consecutive successes for the probe to be considered successful |
+| readinessProbe.timeoutSeconds | int | `10` | Number of seconds after which the probe times out |
+| replicaCount | int | `1` | Number of pods that will be created |
+| resources | object | `{}` | CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) |
+| secrets | list | `[]` | List of Kubernetes secrets to be mounted. These secrets will be mounted on /etc/secrets/. |
+| service.grpc.nodePort | string | `nil` | Port number that each cluster node will listen to |
+| service.grpc.port | int | `6566` | Service port for GRPC requests |
+| service.grpc.targetPort | int | `6566` | Container port serving GRPC requests |
+| service.http.nodePort | string | `nil` | Port number that each cluster node will listen to |
+| service.http.port | int | `80` | Service port for HTTP requests |
+| service.http.targetPort | int | `8080` | Container port serving HTTP requests and Prometheus metrics |
+| service.type | string | `"ClusterIP"` | Kubernetes service type |
+| transformationService.host | string | `""` |  |
+| transformationService.port | int | `6566` |  |
+
+----------------------------------------------
+Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0)
diff --git a/infra/charts/feast/charts/feature-server/templates/_helpers.tpl b/infra/charts/feast/charts/feature-server/templates/_helpers.tpl
new file mode 100644
index 0000000000..23f2d81057
--- /dev/null
+++ b/infra/charts/feast/charts/feature-server/templates/_helpers.tpl
@@ -0,0 +1,45 @@
+{{/* vim: set filetype=mustache: */}}
+{{/*
+Expand the name of the chart.
+*/}}
+{{- define "feature-server.name" -}}
+{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
+{{- end -}}
+
+{{/*
+Create a default fully qualified app name.
+We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
+If the release name contains the chart name, it will be used as the full name.
+*/}} +{{- define "feature-server.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "feature-server.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Common labels +*/}} +{{- define "feature-server.labels" -}} +app.kubernetes.io/name: {{ include "feature-server.name" . }} +helm.sh/chart: {{ include "feature-server.chart" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} diff --git a/infra/charts/feast/charts/feature-server/templates/_ingress.yaml b/infra/charts/feast/charts/feature-server/templates/_ingress.yaml new file mode 100644 index 0000000000..5bed6df047 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/_ingress.yaml @@ -0,0 +1,68 @@ +{{- /* +This takes an array of three values: +- the top context +- the feast component +- the service protocol +- the ingress context +*/ -}} +{{- define "feast.ingress" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +apiVersion: extensions/v1beta1 +kind: Ingress +{{ include "feast.ingress.metadata" . }} +spec: + rules: + {{- range $host := $ingressValues.hosts }} + - host: {{ $host }} + http: + paths: + - path: / + backend: + serviceName: {{ include (printf "feast-%s.fullname" $component) $top }} + servicePort: {{ index $top.Values "service" $protocol "port" }} + {{- end }} +{{- if $ingressValues.https.enabled }} + tls: + {{- range $host := $ingressValues.hosts }} + - secretName: {{ index $ingressValues.https.secretNames $host | default (splitList "." $host | rest | join "-" | printf "%s-tls") }} + hosts: + - {{ $host }} + {{- end }} +{{- end -}} +{{- end -}} + +{{- define "feast.ingress.metadata" -}} +{{- $commonMetadata := fromYaml (include "common.metadata" (first .)) }} +{{- $overrides := fromYaml (include "feast.ingress.metadata-overrides" .) -}} +{{- toYaml (merge $overrides $commonMetadata) -}} +{{- end -}} + +{{- define "feast.ingress.metadata-overrides" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +{{- $commonFullname := include "common.fullname" $top }} +metadata: + name: {{ $commonFullname }}-{{ $component }}-{{ $protocol }} + annotations: + kubernetes.io/ingress.class: {{ $ingressValues.class | quote }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.auth.enabled) }} + nginx.ingress.kubernetes.io/auth-url: {{ $ingressValues.auth.authUrl | quote }} + nginx.ingress.kubernetes.io/auth-response-headers: "x-auth-request-email, x-auth-request-user" + nginx.ingress.kubernetes.io/auth-signin: "https://{{ $ingressValues.auth.signinHost | default (splitList "." (index $ingressValues.hosts 0) | rest | join "." 
| printf "auth.%s")}}/oauth2/start?rd=/r/$host/$request_uri" + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.whitelist) }} + nginx.ingress.kubernetes.io/whitelist-source-range: {{ $ingressValues.whitelist | quote -}} + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") (eq $protocol "grpc") ) }} + # TODO: Allow choice of GRPC/GRPCS + nginx.ingress.kubernetes.io/backend-protocol: "GRPC" + {{- end }} + {{- if $ingressValues.annotations -}} + {{ include "common.annote" $ingressValues.annotations | indent 4 }} + {{- end }} +{{- end -}} diff --git a/infra/charts/feast/charts/feature-server/templates/configmap.yaml b/infra/charts/feast/charts/feature-server/templates/configmap.yaml new file mode 100644 index 0000000000..fbf2633e8e --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/configmap.yaml @@ -0,0 +1,53 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ template "feature-server.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: + app: {{ template "feature-server.name" . }} + component: serving + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +data: + application-generated.yaml: | +{{- if index .Values "application-generated.yaml" "enabled" }} + feast: + registry: {{ .Values.global.registry.path }} + registryRefreshInterval: {{ .Values.global.registry.cache_ttl_seconds }} + {{- if .Values.transformationService.host }} + transformationServiceEndpoint: {{ .Values.transformationService.host}}:{{ .Values.transformationService.port }} + {{- else }} + transformationServiceEndpoint: {{ .Release.Name }}-transformation-service:{{ .Values.transformationService.port }} + {{- end }} + + activeStore: online + stores: + - name: online + type: REDIS + config: + host: {{ .Release.Name }}-redis-master + port: 6379 + rest: + server: + port: {{ .Values.service.http.targetPort }} + grpc: + server: + port: {{ .Values.service.grpc.targetPort }} +{{- end }} + + application-override.yaml: | +{{- if index .Values "application-override.yaml" "enabled" }} + {{- if index .Values "application-override.yaml" "feast" }} + feast: {{- toYaml (index .Values "application-override.yaml" "feast") | nindent 6 }} + registry: {{ .Values.global.registry.path }} + registryRefreshInterval: {{ .Values.global.registry.cache_ttl_seconds }} + project: {{ .Values.global.project }} + {{- end }} + {{- if index .Values "application-override.yaml" "rest" }} + rest: {{- toYaml (index .Values "application-override.yaml" "rest") | nindent 6 }} + {{- end }} + {{- if index .Values "application-override.yaml" "grpc" }} + grpc: {{- toYaml (index .Values "application-override.yaml" "grpc") | nindent 6 }} + {{- end }} +{{- end }} diff --git a/infra/charts/feast/charts/feature-server/templates/deployment.yaml b/infra/charts/feast/charts/feature-server/templates/deployment.yaml new file mode 100644 index 0000000000..1d1bc40029 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/deployment.yaml @@ -0,0 +1,145 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "feature-server.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: + app: {{ template "feature-server.name" . }} + component: serving + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + app: {{ template "feature-server.name" . 
}} + component: serving + release: {{ .Release.Name }} + template: + metadata: + annotations: + checksum/configmap: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }} + {{- if .Values.podAnnotations }} + {{ toYaml .Values.podAnnotations | nindent 8 }} + {{- end }} + labels: + app: {{ template "feature-server.name" . }} + component: serving + release: {{ .Release.Name }} + {{- if .Values.podLabels }} + {{ toYaml .Values.podLabels | nindent 8 }} + {{- end }} + spec: + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + + volumes: + - name: {{ template "feature-server.fullname" . }}-config + configMap: + name: {{ template "feature-server.fullname" . }} + - name: {{ template "feature-server.fullname" . }}-secret + secret: + secretName: {{ template "feature-server.fullname" . }} + {{- range $secret := .Values.secrets }} + - name: {{ $secret }} + secret: + secretName: {{ $secret }} + {{- end }} + + containers: + - name: {{ .Chart.Name }} + image: {{ .Values.image.repository }}:{{ .Values.image.tag }} + imagePullPolicy: {{ .Values.image.pullPolicy }} + + volumeMounts: + - name: {{ template "feature-server.fullname" . }}-config + mountPath: /etc/feast + - name: {{ template "feature-server.fullname" . }}-secret + mountPath: /etc/secrets/feast + readOnly: true + {{- range $secret := .Values.secrets }} + - name: {{ $secret }} + mountPath: "/etc/secrets/{{ $secret }}" + readOnly: true + {{- end }} + + env: + - name: LOG_TYPE + value: {{ .Values.logType | quote }} + - name: LOG_LEVEL + value: {{ .Values.logLevel | quote }} + + {{- if .Values.javaOpts }} + - name: JAVA_TOOL_OPTIONS + value: {{ .Values.javaOpts }} + {{- end }} + + {{- range $key, $value := .Values.envOverrides }} + - name: {{ printf "%s" $key | replace "." 
"_" | upper | quote }} + {{- if eq (kindOf $value) "map" }} + valueFrom: + {{- toYaml $value | nindent 12 }} + {{- else }} + value: {{ $value | quote }} + {{- end }} + {{- end }} + + command: + - java + - -jar + - /opt/feast/feast-serving.jar + - {{ if index .Values "application.yaml" "enabled" -}} + classpath:/application.yml + {{- end }} + {{- if index .Values "application-generated.yaml" "enabled" -}} + ,file:/etc/feast/application-generated.yaml + {{- end }} + {{- if index .Values "application-secret.yaml" "enabled" -}} + ,file:/etc/secrets/feast/application-secret.yaml + {{- end }} + {{- if index .Values "application-override.yaml" "enabled" -}} + ,file:/etc/feast/application-override.yaml + {{- end }} + + ports: + - name: http + containerPort: {{ .Values.service.http.targetPort }} + - name: grpc + containerPort: {{ .Values.service.grpc.targetPort }} + + {{- if .Values.livenessProbe.enabled }} + livenessProbe: + exec: + command: + - "grpc-health-probe" + - "-addr=:{{ .Values.service.grpc.targetPort }}" + - "-connect-timeout={{ .Values.livenessProbe.timeoutSeconds }}s" + - "-rpc-timeout={{ .Values.livenessProbe.timeoutSeconds }}s" + initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.livenessProbe.periodSeconds }} + successThreshold: {{ .Values.livenessProbe.successThreshold }} + timeoutSeconds: {{ .Values.livenessProbe.timeoutSeconds }} + failureThreshold: {{ .Values.livenessProbe.failureThreshold }} + {{- end }} + + {{- if .Values.readinessProbe.enabled }} + readinessProbe: + exec: + command: + - "grpc-health-probe" + - "-addr=:{{ .Values.service.grpc.targetPort }}" + - "-connect-timeout={{ .Values.readinessProbe.timeoutSeconds }}s" + - "-rpc-timeout={{ .Values.readinessProbe.timeoutSeconds }}s" + initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.readinessProbe.periodSeconds }} + successThreshold: {{ .Values.readinessProbe.successThreshold }} + timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }} + failureThreshold: {{ .Values.readinessProbe.failureThreshold }} + {{- end }} + + resources: + {{- toYaml .Values.resources | nindent 10 }} diff --git a/infra/charts/feast/charts/feature-server/templates/ingress.yaml b/infra/charts/feast/charts/feature-server/templates/ingress.yaml new file mode 100644 index 0000000000..1bcd176147 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/ingress.yaml @@ -0,0 +1,7 @@ +{{- if .Values.ingress.http.enabled -}} +{{ template "feast.ingress" (list . "serving" "http" .Values.ingress.http) }} +{{- end }} +--- +{{ if .Values.ingress.grpc.enabled -}} +{{ template "feast.ingress" (list . "serving" "grpc" .Values.ingress.grpc) }} +{{- end }} diff --git a/infra/charts/feast/charts/feature-server/templates/secret.yaml b/infra/charts/feast/charts/feature-server/templates/secret.yaml new file mode 100644 index 0000000000..b6aa88c258 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/secret.yaml @@ -0,0 +1,23 @@ +apiVersion: v1 +kind: Secret +metadata: + name: {{ template "feature-server.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: + app: {{ template "feature-server.name" . 
}} + component: serving + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +type: Opaque +stringData: + application-secret.yaml: | + {{- if index .Values "application-secret.yaml" "feast" }} + feast: {{- toYaml (index .Values "application-secret.yaml" "feast") | nindent 6 }} + {{- end }} + {{- if index .Values "application-secret.yaml" "rest" }} + rest: {{- toYaml (index .Values "application-secret.yaml" "rest") | nindent 6 }} + {{- end }} + {{- if index .Values "application-secret.yaml" "grpc" }} + grpc: {{- toYaml (index .Values "application-secret.yaml" "grpc") | nindent 6 }} + {{- end }} diff --git a/infra/charts/feast/charts/feature-server/templates/service.yaml b/infra/charts/feast/charts/feature-server/templates/service.yaml new file mode 100644 index 0000000000..037fe03870 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/templates/service.yaml @@ -0,0 +1,40 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ template "feature-server.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: + app: {{ template "feature-server.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} + {{- with .Values.service.annotations }} + annotations: +{{ toYaml . | indent 4 }} + {{- end }} +spec: + type: {{ .Values.service.type }} + {{- if .Values.service.loadBalancerIP }} + loadBalancerIP: {{ .Values.service.loadBalancerIP }} + {{- end }} + {{- if .Values.service.loadBalancerSourceRanges }} + loadBalancerSourceRanges: +{{ toYaml .Values.service.loadBalancerSourceRanges | indent 2 }} + {{- end }} + ports: + - name: http + port: {{ .Values.service.http.port }} + targetPort: {{ .Values.service.http.targetPort }} + {{- if .Values.service.http.nodePort }} + nodePort: {{ .Values.service.http.nodePort }} + {{- end }} + - name: grpc + port: {{ .Values.service.grpc.port }} + targetPort: {{ .Values.service.grpc.targetPort }} + {{- if .Values.service.grpc.nodePort }} + nodePort: {{ .Values.service.grpc.nodePort }} + {{- end }} + selector: + app: {{ template "feature-server.name" . }} + component: serving + release: {{ .Release.Name }} diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml new file mode 100644 index 0000000000..9cade47e60 --- /dev/null +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -0,0 +1,147 @@ +# replicaCount -- Number of pods that will be created +replicaCount: 1 + +image: + # image.repository -- Docker image for Feature Server repository + repository: feastdev/feature-server-java + # image.tag -- Image tag + tag: 0.20.2 + # image.pullPolicy -- Image pull policy + pullPolicy: IfNotPresent + + +transformationService: + host: "" + port: 6566 + + +application.yaml: + # "application.yaml".enabled -- Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. + enabled: true + +application-generated.yaml: + # "application-generated.yaml".enabled -- Flag to include Helm generated configuration. Please set `application-override.yaml` to override this configuration. + enabled: true + +# "application-secret.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). 
Will be created as a Secret. `application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. +application-secret.yaml: + enabled: false + +# "application-override.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/java/serving/src/main/resources/application.yml). Will be created as a ConfigMap. `application-override.yaml` has a higher precedence than `application-secret.yaml` +application-override.yaml: + enabled: true + +# javaOpts -- [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m`
+javaOpts:
+
+# logType -- Log format, either `JSON` or `Console`
+logType: Console
+# logLevel -- Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR`
+logLevel: WARN
+
+
+livenessProbe:
+  # livenessProbe.enabled -- Flag to enable the probe
+  enabled: true
+  # livenessProbe.initialDelaySeconds -- Delay before the probe is initiated
+  initialDelaySeconds: 60
+  # livenessProbe.periodSeconds -- How often to perform the probe
+  periodSeconds: 10
+  # livenessProbe.timeoutSeconds -- Number of seconds after which the probe times out
+  timeoutSeconds: 5
+  # livenessProbe.successThreshold -- Min consecutive successes for the probe to be considered successful
+  successThreshold: 1
+  # livenessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed
+  failureThreshold: 5
+
+readinessProbe:
+  # readinessProbe.enabled -- Flag to enable the probe
+  enabled: true
+  # readinessProbe.initialDelaySeconds -- Delay before the probe is initiated
+  initialDelaySeconds: 15
+  # readinessProbe.periodSeconds -- How often to perform the probe
+  periodSeconds: 10
+  # readinessProbe.timeoutSeconds -- Number of seconds after which the probe times out
+  timeoutSeconds: 10
+  # readinessProbe.successThreshold -- Min consecutive successes for the probe to be considered successful
+  successThreshold: 1
+  # readinessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed
+  failureThreshold: 5
+
+service:
+  # service.type -- Kubernetes service type
+  type: ClusterIP
+  http:
+    # service.http.port -- Service port for HTTP requests
+    port: 80
+    # service.http.targetPort -- Container port serving HTTP requests and Prometheus metrics
+    targetPort: 8080
+    # service.http.nodePort -- Port number that each cluster node will listen to
+    nodePort:
+  grpc:
+    # service.grpc.port -- Service port for GRPC requests
+    port: 6566
+    # service.grpc.targetPort -- Container port serving GRPC requests
+    targetPort: 6566
+    # service.grpc.nodePort -- Port number that each cluster node will listen to
+    nodePort:
+
+ingress:
+  grpc:
+    # ingress.grpc.enabled -- Flag to create an ingress resource for the service
+    enabled: false
+    # ingress.grpc.class -- Which ingress controller to use
+    class: nginx
+    # ingress.grpc.hosts -- List of hostnames to match when routing requests
+    hosts: []
+    # ingress.grpc.annotations -- Extra annotations for the ingress
+    annotations: {}
+    https:
+      # ingress.grpc.https.enabled -- Flag to enable HTTPS
+      enabled: true
+      # ingress.grpc.https.secretNames -- Map of hostname to TLS secret name
+      secretNames: {}
+    # ingress.grpc.whitelist -- Allowed client IP source ranges
+    whitelist: ""
+    auth:
+      # ingress.grpc.auth.enabled -- Flag to enable auth
+      enabled: false
+  http:
+    # ingress.http.enabled -- Flag to create an ingress resource for the service
+    enabled: false
+    # ingress.http.class -- Which ingress controller to use
+    class: nginx
+    # ingress.http.hosts -- List of hostnames to match when routing requests
+    hosts: []
+    # ingress.http.annotations -- Extra annotations for the ingress
+    annotations: {}
+    https:
+      # ingress.http.https.enabled -- Flag to enable HTTPS
+      enabled: true
+      # ingress.http.https.secretNames -- Map of hostname to TLS secret name
+      secretNames: {}
+    # ingress.http.whitelist -- Allowed client IP source ranges
+    whitelist: ""
+    auth:
+      # ingress.http.auth.enabled -- Flag to enable auth
+      enabled: false
+      # ingress.http.auth.authUrl -- URL to an existing authentication service
+      authUrl: http://auth-server.auth-ns.svc.cluster.local/auth
+
+# resources -- CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container)
+resources: {}
+
+# nodeSelector -- Node labels for pod assignment
+nodeSelector: {}
+
+# envOverrides -- Extra environment variables to set
+envOverrides: {}
+
+# secrets -- List of Kubernetes secrets to be mounted. These secrets will be mounted on /etc/secrets/.
+secrets: []
+
+# podAnnotations -- Annotations to be added to Feast Serving pods
+podAnnotations: {}
+
+# podLabels -- Labels to be added to Feast Serving pods
+podLabels: {}
diff --git a/infra/charts/feast/charts/redis-10.5.6.tgz b/infra/charts/feast/charts/redis-10.5.6.tgz
new file mode 100644
index 0000000000..f1e4ec4105
Binary files /dev/null and b/infra/charts/feast/charts/redis-10.5.6.tgz differ
diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml
new file mode 100644
index 0000000000..2de15e15f6
--- /dev/null
+++ b/infra/charts/feast/charts/transformation-service/Chart.yaml
@@ -0,0 +1,10 @@
+apiVersion: v1
+description: "Transformation service for computing on-demand features"
+name: transformation-service
+version: 0.20.2
+appVersion: v0.20.2
+keywords:
+- machine learning
+- big data
+- mlops
+home: https://github.com/feast-dev/feast
\ No newline at end of file
diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md
new file mode 100644
index 0000000000..d7044c1abf
--- /dev/null
+++ b/infra/charts/feast/charts/transformation-service/README.md
@@ -0,0 +1,27 @@
+# transformation-service
+
+![Version: 0.20.2](https://img.shields.io/badge/Version-0.20.2-informational?style=flat-square) ![AppVersion: v0.20.2](https://img.shields.io/badge/AppVersion-v0.20.2-informational?style=flat-square)
+
+Transformation service for computing on-demand features
+
+**Homepage:**
+
+## Values
+
+| Key | Type | Default | Description |
+|-----|------|--------------------------------------------|-------------|
+| envOverrides | object | `{}` | Extra environment variables to set |
+| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy |
+| image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository |
+| image.tag | string | `"0.20.2"` | Image tag |
+| nodeSelector | object | `{}` | Node labels for pod assignment |
+| podLabels | object | `{}` | Labels to be added to Feast Serving pods |
+| replicaCount | int | `1` | Number of pods that will be created |
+| resources | object | `{}` | CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) |
+| service.grpc.nodePort | string | `nil` | Port number that each cluster node will listen to |
+| service.grpc.port | int | `6566` | Service port for GRPC requests |
+| service.grpc.targetPort | int | `6566` | Container port serving GRPC requests |
+| service.type | string | `"ClusterIP"` | Kubernetes service type |
+
+----------------------------------------------
+Autogenerated from chart metadata using [helm-docs v1.5.0](https://github.com/norwoodj/helm-docs/releases/v1.5.0)
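The transformation service receives its entire Feast configuration through a single environment variable: the deployment template below renders config/feature_store.yaml and injects it as FEATURE_STORE_YAML_BASE64. A hedged local sketch of the same round trip, using the chart's documented placeholder defaults for the registry path and project:

```bash
# Hand-render config/feature_store.yaml with the chart's default globals.
cat > /tmp/feature_store.yaml <<'EOF'
registry:
  path: gs://path/to/registry.db
  cache_ttl_seconds: 0
provider: local
project: default
flags:
  on_demand_transforms: true
  alpha_features: true
EOF

# The deployment base64-encodes the rendered file into the container env.
# Reproduce and verify the encoding locally (decode flag spelling varies
# between GNU and BSD base64; --decode works on GNU coreutils).
FEATURE_STORE_YAML_BASE64=$(base64 < /tmp/feature_store.yaml)
echo "$FEATURE_STORE_YAML_BASE64" | base64 --decode
```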
diff --git a/infra/charts/feast/charts/transformation-service/config/feature_store.yaml b/infra/charts/feast/charts/transformation-service/config/feature_store.yaml
new file mode 100644
index 0000000000..555e93a306
--- /dev/null
+++ b/infra/charts/feast/charts/transformation-service/config/feature_store.yaml
@@ -0,0 +1,8 @@
+registry:
+  path: {{ .Values.global.registry.path }}
+  cache_ttl_seconds: {{ .Values.global.registry.cache_ttl_seconds }}
+provider: local
+project: {{ .Values.global.project }}
+flags:
+  on_demand_transforms: true
+  alpha_features: true
\ No newline at end of file
diff --git a/infra/charts/feast/charts/transformation-service/templates/_helpers.tpl b/infra/charts/feast/charts/transformation-service/templates/_helpers.tpl
new file mode 100644
index 0000000000..8a55719632
--- /dev/null
+++ b/infra/charts/feast/charts/transformation-service/templates/_helpers.tpl
@@ -0,0 +1,45 @@
+{{/* vim: set filetype=mustache: */}}
+{{/*
+Expand the name of the chart.
+*/}}
+{{- define "transformation-service.name" -}}
+{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
+{{- end -}}
+
+{{/*
+Create a default fully qualified app name.
+We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
+If the release name contains the chart name, it will be used as the full name.
+*/}}
+{{- define "transformation-service.fullname" -}}
+{{- if .Values.fullnameOverride -}}
+{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
+{{- else -}}
+{{- $name := default .Chart.Name .Values.nameOverride -}}
+{{- if contains $name .Release.Name -}}
+{{- .Release.Name | trunc 63 | trimSuffix "-" -}}
+{{- else -}}
+{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
+{{- end -}}
+{{- end -}}
+{{- end -}}
+
+{{/*
+Create chart name and version as used by the chart label.
+*/}}
+{{- define "transformation-service.chart" -}}
+{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
+{{- end -}}
+
+{{/*
+Common labels
+*/}}
+{{- define "transformation-service.labels" -}}
+app.kubernetes.io/name: {{ include "transformation-service.name" . }}
+helm.sh/chart: {{ include "transformation-service.chart" . }}
+app.kubernetes.io/instance: {{ .Release.Name }}
+{{- if .Chart.AppVersion }}
+app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
+{{- end }}
+app.kubernetes.io/managed-by: {{ .Release.Service }}
+{{- end -}}
diff --git a/infra/charts/feast/charts/transformation-service/templates/deployment.yaml b/infra/charts/feast/charts/transformation-service/templates/deployment.yaml
new file mode 100644
index 0000000000..1b2172c305
--- /dev/null
+++ b/infra/charts/feast/charts/transformation-service/templates/deployment.yaml
@@ -0,0 +1,48 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ template "transformation-service.fullname" . }}
+  namespace: {{ .Release.Namespace }}
+  labels:
+    app: {{ template "transformation-service.name" . }}
+    component: serving
+    chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }}
+    release: {{ .Release.Name }}
+    heritage: {{ .Release.Service }}
+spec:
+  replicas: {{ .Values.replicaCount }}
+  selector:
+    matchLabels:
+      app: {{ template "transformation-service.name" . }}
+      component: serving
+      release: {{ .Release.Name }}
+  template:
+    metadata:
+      labels:
+        app: {{ template "transformation-service.name" . }}
+        component: serving
+        release: {{ .Release.Name }}
+        {{- if .Values.podLabels }}
+        {{ toYaml .Values.podLabels | nindent 8 }}
+        {{- end }}
+    spec:
+      {{- with .Values.nodeSelector }}
+      nodeSelector:
+        {{- toYaml . 
| nindent 8 }} + {{- end }} + containers: + - name: {{ .Chart.Name }} + image: {{ .Values.image.repository }}:{{ .Values.image.tag }} + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: grpc + containerPort: {{ .Values.service.grpc.targetPort }} + + env: + - name: FEATURE_TRANSFORMATION_SERVER_PORT + value: {{ .Values.service.grpc.targetPort | quote }} + - name: FEATURE_STORE_YAML_BASE64 + value: {{ tpl (.Files.Get "config/feature_store.yaml") . | b64enc | quote }} + + resources: + {{- toYaml .Values.resources | nindent 10 }} diff --git a/infra/charts/feast/charts/transformation-service/templates/service.yaml b/infra/charts/feast/charts/transformation-service/templates/service.yaml new file mode 100644 index 0000000000..6ea7c94162 --- /dev/null +++ b/infra/charts/feast/charts/transformation-service/templates/service.yaml @@ -0,0 +1,27 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ template "transformation-service.fullname" . }} + namespace: {{ .Release.Namespace }} + labels: + app: {{ template "transformation-service.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} + {{- with .Values.service.annotations }} + annotations: +{{ toYaml . | indent 4 }} + {{- end }} +spec: + type: {{ .Values.service.type }} + ports: + - name: grpc + port: {{ .Values.service.grpc.port }} + targetPort: {{ .Values.service.grpc.targetPort }} + {{- if .Values.service.grpc.nodePort }} + nodePort: {{ .Values.service.grpc.nodePort }} + {{- end }} + selector: + app: {{ template "transformation-service.name" . }} + component: serving + release: {{ .Release.Name }} diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml new file mode 100644 index 0000000000..bae9179478 --- /dev/null +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -0,0 +1,34 @@ +# replicaCount -- Number of pods that will be created +replicaCount: 1 + +image: + # image.repository -- Docker image for Transformation Server repository + repository: feastdev/feature-transformation-server + # image.tag -- Image tag + tag: 0.20.2 + # image.pullPolicy -- Image pull policy + pullPolicy: IfNotPresent + +service: + # service.type -- Kubernetes service type + type: ClusterIP + grpc: + # service.grpc.port -- Service port for GRPC requests + port: 6566 + # service.grpc.targetPort -- Container port serving GRPC requests + targetPort: 6566 + # service.grpc.nodePort -- Port number that each cluster node will listen to + nodePort: + + +# resources -- CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) +resources: {} + +# nodeSelector -- Node labels for pod assignment +nodeSelector: {} + +# envOverrides -- Extra environment variables to set +envOverrides: {} + +# podLabels -- Labels to be added to Feast Serving pods +podLabels: {} \ No newline at end of file diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml new file mode 100644 index 0000000000..b1535ea701 --- /dev/null +++ b/infra/charts/feast/requirements.yaml @@ -0,0 +1,15 @@ +dependencies: +- name: feature-server + alias: feature-server + version: 0.20.2 + condition: feature-server.enabled + repository: https://feast-helm-charts.storage.googleapis.com +- name: transformation-service + alias: transformation-service + version: 0.20.2 + condition: 
transformation-service.enabled + repository: https://feast-helm-charts.storage.googleapis.com +- name: redis + version: 10.5.6 + repository: https://charts.helm.sh/stable + condition: redis.enabled \ No newline at end of file diff --git a/infra/charts/feast/values.yaml b/infra/charts/feast/values.yaml new file mode 100644 index 0000000000..2e65f9e63f --- /dev/null +++ b/infra/charts/feast/values.yaml @@ -0,0 +1,23 @@ +feature-server: + enabled: true + + +transformation-service: + enabled: true + +redis: + # redis.enabled -- Flag to install Redis + enabled: false + # redis.usePassword -- Disable redis password + usePassword: false + +global: + # global.registry -- Information about registry managed by Feast Python SDK (must be in sync with feature_store.yaml) + registry: + # global.registry.path -- Path to the registry file managed by Feast Python SDK + path: gs://path/to/registry.db + # global.registry.cache_ttl_seconds -- Registry cache (in memory) will be refreshed on this interval + cache_ttl_seconds: 0 + + # global.project -- Project from feature_store.yaml + project: default \ No newline at end of file diff --git a/infra/docker-compose/docker-compose.yml b/infra/docker-compose/docker-compose.yml index 98131d6ccf..579dc6d65f 100644 --- a/infra/docker-compose/docker-compose.yml +++ b/infra/docker-compose/docker-compose.yml @@ -16,7 +16,7 @@ services: - java - -jar - /opt/feast/feast-core.jar - - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml + - classpath:/application.yml,file:/etc/feast/application.yml jobservice: image: gcr.io/kf-feast/feast-jobservice:${FEAST_VERSION} @@ -104,7 +104,7 @@ services: - java - -jar - /opt/feast/feast-serving.jar - - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml + - classpath:/application.yml,file:/etc/feast/application.yml redis: image: redis:5-alpine diff --git a/infra/scripts/compile-templates.py b/infra/scripts/compile-templates.py new file mode 100644 index 0000000000..e3130ab419 --- /dev/null +++ b/infra/scripts/compile-templates.py @@ -0,0 +1,53 @@ +try: + from jinja2 import Template +except ImportError: + raise ImportError( + "Please install Jinja in order for template generation to succeed" + ) + +############################ +# Find the repo root +############################ + +from pathlib import Path + + +def find_repo(path): + # Find repository root from the path's parents + for path in Path(path).parents: + # Check whether "path/.git" exists and is a directory + git_dir = path / ".git" + if git_dir.is_dir(): + return path + + +# Find the repo root where the script is +repo_root = find_repo(__file__) + +############################ +# Template README.md +############################ +roadmap_path = repo_root / "docs" / "roadmap.md" +with open(roadmap_path, "r") as f: + # skip first lines since it has the title + roadmap_contents_lines = f.readlines()[2:] + + # Join back again + roadmap_contents = "".join(roadmap_contents_lines) + +template_path = repo_root / "infra" / "templates" / "README.md.jinja2" +with open(template_path) as f: + template = Template(f.read()) + +# Compile template +readme_md = template.render(roadmap_contents=roadmap_contents) + +# Add warning to generated file +readme_md = ( + "\n\n" + + readme_md +) + +readme_path = repo_root / "README.md" +with open(readme_path, "w") as f: + f.write(readme_md) diff --git a/infra/scripts/create-cluster.sh b/infra/scripts/create-cluster.sh new file mode 100755 index 0000000000..24b961b977 --- /dev/null +++ 
b/infra/scripts/create-cluster.sh
@@ -0,0 +1,98 @@
+#!/usr/bin/env bash
+# Settings
+# Make sure you run "brew install redis"
+
+# BIN_PATH="/opt/homebrew/bin"
+REDIS_CLI=$(which redis-cli)
+REDIS_SERVER=$(which redis-server)
+CLUSTER_HOST=127.0.0.1
+# By default, creates a cluster on ports 6001-6006: 3 masters (6001-6003) and 3 replicas (6004-6006)
+if [[ "$2" =~ ^[0-9]+$ ]]; then PORT=$2; else PORT=6000; fi
+TIMEOUT=2000
+NODES=6
+REPLICAS=1
+PROTECTED_MODE=yes
+ADDITIONAL_OPTIONS=""
+
+if [ -f config.sh ]
+then
+    source "config.sh"
+fi
+
+# Computed vars
+ENDPORT=$((PORT+NODES))
+
+if [ "$1" == "start" ]
+then
+    while [ $((PORT < ENDPORT)) != "0" ]; do
+        PORT=$((PORT+1))
+        echo "Starting $PORT"
+        $REDIS_SERVER --port $PORT --protected-mode $PROTECTED_MODE --cluster-enabled yes --cluster-config-file nodes-${PORT}.conf --cluster-node-timeout $TIMEOUT --appendonly yes --appendfilename appendonly-${PORT}.aof --dbfilename dump-${PORT}.rdb --logfile ${PORT}.log --daemonize yes ${ADDITIONAL_OPTIONS}
+    done
+    exit 0
+fi
+
+if [ "$1" == "create" ]
+then
+    HOSTS=""
+    while [ $((PORT < ENDPORT)) != "0" ]; do
+        PORT=$((PORT+1))
+        HOSTS="$HOSTS $CLUSTER_HOST:$PORT"
+    done
+    OPT_ARG=""
+    if [ "$2" == "-f" ] || [ "$3" == "-f" ]; then
+        OPT_ARG="--cluster-yes"
+    fi
+    $REDIS_CLI --cluster create $HOSTS --cluster-replicas $REPLICAS $OPT_ARG
+    exit 0
+fi
+
+if [ "$1" == "stop" ]
+then
+    while [ $((PORT < ENDPORT)) != "0" ]; do
+        PORT=$((PORT+1))
+        echo "Stopping $PORT"
+        $REDIS_CLI -p $PORT shutdown nosave
+    done
+    exit 0
+fi
+
+if [ "$1" == "watch" ]
+then
+    PORT=$((PORT+1))
+    while true; do
+        clear
+        date
+        $REDIS_CLI -p $PORT cluster nodes | head -30
+        sleep 1
+    done
+    exit 0
+fi
+
+if [ "$1" == "clean" ]
+then
+    echo "Cleaning *.log"
+    rm -rf *.log
+    echo "Cleaning appendonly-*"
+    rm -rf appendonly-*
+    echo "Cleaning dump-*.rdb"
+    rm -rf dump-*.rdb
+    echo "Cleaning nodes-*.conf"
+    rm -rf nodes-*.conf
+    exit 0
+fi
+
+if [ "$1" == "clean-logs" ]
+then
+    echo "Cleaning *.log"
+    rm -rf *.log
+    exit 0
+fi
+
+echo "Usage: $0 [start|create|stop|watch|clean|clean-logs]"
+echo "start [PORT] -- Launch Redis Cluster instances."
+echo "create [PORT] [-f] -- Create a cluster using redis-cli --cluster create."
+echo "stop [PORT] -- Stop Redis Cluster instances."
+echo "watch [PORT] -- Show CLUSTER NODES output (first 30 lines) of first node."
+echo "clean -- Remove all instance data, logs, and configs."
+echo "clean-logs -- Remove just the instance logs."
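Assuming redis-server and redis-cli are on PATH (e.g. via "brew install redis"), a typical session with the script above might look like this (untested sketch):

```bash
./infra/scripts/create-cluster.sh start      # launch 6 instances on ports 6001-6006
./infra/scripts/create-cluster.sh create -f  # form the cluster: 3 masters, 3 replicas (--cluster-yes)
./infra/scripts/create-cluster.sh watch      # poll CLUSTER NODES on the first node; Ctrl-C to exit
./infra/scripts/create-cluster.sh stop       # shut all instances down
./infra/scripts/create-cluster.sh clean      # remove data files, logs, and node configs
```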
diff --git a/infra/scripts/helm/install-helm.sh b/infra/scripts/helm/install-helm.sh new file mode 100755 index 0000000000..a5073289df --- /dev/null +++ b/infra/scripts/helm/install-helm.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -e +readonly HELM_URL=https://storage.googleapis.com/kubernetes-helm +readonly HELM_TARBALL="helm-${HELM_VERSION}-linux-amd64.tar.gz" +readonly STABLE_REPO_URL=https://charts.helm.sh/stable +readonly INCUBATOR_REPO_URL=https://charts.helm.sh/incubator +curl -s "https://get.helm.sh/helm-${HELM_VERSION}-linux-amd64.tar.gz" | tar -C /tmp -xz +sudo mv /tmp/linux-amd64/helm /usr/bin/helm +helm repo add incubator "$INCUBATOR_REPO_URL" diff --git a/infra/scripts/helm/kafka-values.tpl.yaml b/infra/scripts/helm/kafka-values.tpl.yaml deleted file mode 100644 index 206323f337..0000000000 --- a/infra/scripts/helm/kafka-values.tpl.yaml +++ /dev/null @@ -1,18 +0,0 @@ -externalAccess: - enabled: true - service: - loadBalancerIPs: - - $feast_kafka_ip - annotations: - cloud.google.com/load-balancer-type: Internal - loadBalancerSourceRanges: - - 10.0.0.0/8 - - 172.16.0.0/12 - - 192.168.0.0/16 - -persistence: - enabled: false - -zookeeper: - persistence: - enabled: false \ No newline at end of file diff --git a/infra/scripts/helm/push-helm-charts.sh b/infra/scripts/helm/push-helm-charts.sh new file mode 100755 index 0000000000..08753adb3c --- /dev/null +++ b/infra/scripts/helm/push-helm-charts.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -e + +if [ $# -ne 1 ]; then + echo "Please provide a single semver version (without a \"v\" prefix) to test the repository against, e.g 0.99.0" + exit 1 +fi + +bucket=gs://feast-helm-charts +repo_url=https://feast-helm-charts.storage.googleapis.com/ + +helm plugin install https://github.com/hayorov/helm-gcs.git --version 0.3.18 || true + +helm repo add feast-helm-chart-repo $bucket + +cd infra/charts +helm package feast +helm package feast-python-server + +helm gcs push --public feast-${1}.tgz feast-helm-chart-repo --force +helm gcs push --public feast-python-server-${1}.tgz feast-helm-chart-repo --force +rm -f ./*.tgz \ No newline at end of file diff --git a/infra/scripts/helm/redis-cluster-values.tpl.yaml b/infra/scripts/helm/redis-cluster-values.tpl.yaml deleted file mode 100644 index 5b15252495..0000000000 --- a/infra/scripts/helm/redis-cluster-values.tpl.yaml +++ /dev/null @@ -1,17 +0,0 @@ -cluster: - nodes: 3 - replicas: 0 - externalAccess: - enabled: true - service: - annotations: - cloud.google.com/load-balancer-type: Internal - loadBalancerIP: - - $feast_redis_1_ip - - $feast_redis_2_ip - - $feast_redis_3_ip - -persistence: - enabled: false - -usePassword: false \ No newline at end of file diff --git a/infra/scripts/helm/validate-helm-chart-publish.sh b/infra/scripts/helm/validate-helm-chart-publish.sh new file mode 100755 index 0000000000..e8a8f9b2fc --- /dev/null +++ b/infra/scripts/helm/validate-helm-chart-publish.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# Get project root +PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel) + +# Should have no "latest" tags +grep -R "tag: latest" "$PROJECT_ROOT_DIR"/infra/charts || true +COUNT=$(grep -R "tag: latest" "$PROJECT_ROOT_DIR"/infra/charts | wc -l) + +if [ "$COUNT" -gt 0 ]; then + echo 'Found more than one instance of "latest" in an image tag. Please replace with correct release version.'; + exit 1 +else + echo 'No "latest" tags found, continuing'; +fi + +# TODO: Enable DockerHub vs GCR checks asap. 
+ +## Should have no "gcr" images +#grep -R "gcr.io" "$PROJECT_ROOT_DIR"/infra/charts || true +#COUNT=$(grep -R "gcr.io" "$PROJECT_ROOT_DIR"/infra/charts | wc -l) +# +#if [ "$COUNT" -gt 0 ]; then +# echo 'Found more than one instance of "gcr.io" in charts. Please replace with https://hub.docker.com/r/feastdev feast image.'; +# exit 1 +#else +# echo 'No "gcr.io" instances found, continuing'; +#fi + +# Should have no "SNAPSHOT" versions +grep -R "SNAPSHOT" "$PROJECT_ROOT_DIR"/infra/charts || true +COUNT=$(grep -R "SNAPSHOT" "$PROJECT_ROOT_DIR"/infra/charts | wc -l) + +if [ "$COUNT" -gt 0 ]; then + echo 'Found more than one instance of "SNAPSHOT" in charts. Please ensure that no SNAPSHOT charts are published.'; + exit 1 +else + echo 'No "SNAPSHOT" instances found, continuing'; +fi \ No newline at end of file diff --git a/infra/scripts/helm/validate-helm-chart-versions.sh b/infra/scripts/helm/validate-helm-chart-versions.sh new file mode 100755 index 0000000000..0ba75bd744 --- /dev/null +++ b/infra/scripts/helm/validate-helm-chart-versions.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +set -e + +# Amount of file locations that need to be bumped in unison when versions increment +UNIQUE_VERSIONS_COUNT=18 + +if [ $# -ne 1 ]; then + echo "Please provide a single semver version (without a \"v\" prefix) to test the repository against, e.g 0.99.0" + exit 1 +fi + +CHART_ROOT_DIR=$(git rev-parse --show-toplevel)/infra/charts + + +echo "Finding how many versions have been set to ${1} in the current repository" + +CHANGED_VERSIONS_COUNT=$(grep -R --exclude-dir='.*' ${1} ${CHART_ROOT_DIR} | wc -l) +echo "Found ${CHANGED_VERSIONS_COUNT} versions that have been changed" + +echo "This repository should contain ${UNIQUE_VERSIONS_COUNT} changed versions" + +if [ $UNIQUE_VERSIONS_COUNT -ne "${CHANGED_VERSIONS_COUNT}" ]; then + echo "We expected $UNIQUE_VERSIONS_COUNT to have been updated to the latest version, but only ${CHANGED_VERSIONS_COUNT} have. This number is statically defined based on a simple grep" + echo "Please confirm that all versions in all charts and requirements files have been bumped to the tagged release version. If you have successfully bumped all versions and there is still a mismatch in the expected and actual counts, then rerun the following command" + echo "grep -R 'insert_your_semver_version_here' . | wc -l" + echo "and update the script scripts/validate-helm-chart-versions.sh" + echo + echo For your reference, the following lines were detected as changed + echo + grep -R --exclude-dir='.*' ${1} ${CHART_ROOT_DIR} || true + echo + exit 1 +fi + +echo "All versions validated. Passing test." diff --git a/infra/scripts/publish-java-datatypes.sh b/infra/scripts/publish-java-datatypes.sh new file mode 100755 index 0000000000..55e5ce2f1b --- /dev/null +++ b/infra/scripts/publish-java-datatypes.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash + +set -e +set -o pipefail + +GPG_KEY_IMPORT_DIR=/etc/gpg + +usage() +{ + echo "usage: publish-java-datatypes.sh + + --revision Value for the revision e.g. '0.2.3' + --gpg-key-import-dir Directory containing existing GPG keys to import. + The directory should contain these 2 files: + - public-key + - private-key + The default value is '/etc/gpg' + + This script assumes the GPG private key is protected by a passphrase. + The passphrase can be specified in \$HOME/.m2/settings.xml. In the same xml + file, credentials to upload releases to Sonatype must also be provided. 
+ + # Example settings: ~/.m2/settings.xml + + + + ossrh + SONATYPE_USER + SONATYPE_PASSWORD + + + + + ossrh + + GPG_PASSPHRASE + + + + +" +} + +while [ "$1" != "" ]; do + case "$1" in + --revision ) REVISION="$2"; shift;; + --gpg-key-import-dir ) GPG_KEY_IMPORT_DIR="$2"; shift;; + -h | --help ) usage; exit;; + * ) usage; exit 1 + esac + shift +done + +if [ -z $REVISION ]; then usage; exit 1; fi + +echo "============================================================" +echo "Checking Maven and GPG versions" +echo "============================================================" +mvn -f java/pom.xml --version +echo "" +gpg --version + +echo "============================================================" +echo "Importing GPG keys" +echo "============================================================" +gpg --import --batch --yes $GPG_KEY_IMPORT_DIR/public-key +gpg --import --batch --yes $GPG_KEY_IMPORT_DIR/private-key + +echo "============================================================" +echo "Deploying Java Datatypes with revision: $REVISION" +echo "============================================================" +if [ -z $REVISION ]; then usage; exit 1; fi +mvn -f java/pom.xml --projects .,datatypes -Drevision=$REVISION --batch-mode clean deploy diff --git a/infra/scripts/publish-java-sdk.sh b/infra/scripts/publish-java-sdk.sh index 91123c8d4e..a59bc8fe67 100755 --- a/infra/scripts/publish-java-sdk.sh +++ b/infra/scripts/publish-java-sdk.sh @@ -15,7 +15,7 @@ usage() - public-key - private-key The default value is '/etc/gpg' - + This script assumes the GPG private key is protected by a passphrase. The passphrase can be specified in \$HOME/.m2/settings.xml. In the same xml file, credentials to upload releases to Sonatype must also be provided. @@ -56,7 +56,7 @@ if [ -z $REVISION ]; then usage; exit 1; fi echo "============================================================" echo "Checking Maven and GPG versions" echo "============================================================" -mvn --version +mvn -f java/pom.xml --version echo "" gpg --version @@ -69,4 +69,5 @@ gpg --import --batch --yes $GPG_KEY_IMPORT_DIR/private-key echo "============================================================" echo "Deploying Java SDK with revision: $REVISION" echo "============================================================" -mvn --projects datatypes/java,sdk/java -Drevision=$REVISION --batch-mode clean deploy +if [ -z $REVISION ]; then usage; exit 1; fi +mvn -f java/pom.xml --projects .,sdk -Drevision=$REVISION --batch-mode clean deploy diff --git a/infra/scripts/release-patch.sh b/infra/scripts/release-patch.sh new file mode 100755 index 0000000000..8b7973185e --- /dev/null +++ b/infra/scripts/release-patch.sh @@ -0,0 +1,264 @@ +#!/usr/bin/env bash +set -eo pipefail + + +usage() +{ + echo "usage: release-patch.sh + + This script is used to release a patch release. It is untested on major/minor releases and for those, some modification may be necessary. + + -v, --version version to release, example: 0.10.6 + -t, --github-token personal GitHub token + -r, --remote git remote server name for the feast-dev/feast repo (e.g. origin, upstream, etc.) 
+" +} + +while [ "$1" != "" ]; do + case "$1" in + -v | --version ) VERSION="$2"; shift;; + -t | --github-token ) GH_TOKEN="$2"; shift;; + -r | --remote ) REMOTE="$2"; shift;; + -h | --help ) usage; exit;; + * ) usage; exit 1 + esac + shift +done + +if [ -z $VERSION ]; then usage; exit 1; fi +if [ -z $GH_TOKEN ]; then usage; exit 1; fi +if [ -z $REMOTE ]; then usage; exit 1; fi +regex="([0-9]+)\.([0-9]+)\.([0-9]+)" +if [[ $VERSION =~ $regex ]] +then + MAJOR="${BASH_REMATCH[1]}" + MINOR="${BASH_REMATCH[2]}" + PATCH="${BASH_REMATCH[3]}" +else + usage + exit 1 +fi +if ! which gh ; then echo "Please install the GitHub CLI to use this script"; exit 1; fi + +echo "This script is mostly idempotent; check git status for temp files before restarting. It will always prompt you before making any non-local change." + +# Go to infra/scripts directory +cd $(dirname "$0") +# Login to GitHub CLI +echo $GH_TOKEN | gh auth login --with-token + +echo "Step 1: rebase new commits onto release branch" +git fetch $REMOTE +git checkout $REMOTE/master +STARTING_COMMIT=$(git merge-base $REMOTE/master v$MAJOR.$MINOR-branch) +git checkout v$MAJOR.$MINOR-branch + +push_rebased_commits() +{ + echo "Pushing commits" + git push $REMOTE v$MAJOR.$MINOR-branch + echo "Commits pushed" +} +rebase_from_master() +{ + echo "Rebasing commits" + git checkout $REMOTE/master + git rebase --interactive --onto v$MAJOR.$MINOR-branch $STARTING_COMMIT HEAD + git branch -f v$MAJOR.$MINOR-branch HEAD + git checkout v$MAJOR.$MINOR-branch + echo "Commits rebased" + echo "Step 1b: Push commits" + read -p "Commits are not pushed. Continue (y) or skip this sub-step (n)? " choice + case "$choice" in + y|Y ) push_rebased_commits ;; + * ) echo "Skipping this sub-step" ;; + esac ; +} +echo "Step 1a: rebase commits" +if git status | grep -q "is ahead of" ; then + read -p "Your local branch is ahead of its remote counterpart, indicating you may have already rebased. Skip this step (y) or run the rebase starting from commit $STARTING_COMMIT (n)? " choice + case "$choice" in + y|Y ) echo "Skipping this step" ;; + * ) rebase_from_master ;; + esac ; +else + read -p "Will rebase starting from commit $STARTING_COMMIT. Continue (y) or skip this step (n)? " choice + case "$choice" in + y|Y ) rebase_from_master ;; + * ) echo "Skipping this step" ;; + esac ; +fi + +CHANGELOG=$(git rev-parse --show-toplevel)/CHANGELOG.md + +commit_changelog() +{ + echo "Committing CHANGELOG.md" + git add $CHANGELOG + git commit -m "Update CHANGELOG for Feast v$MAJOR.$MINOR.$PATCH" +} +update_changelog() +{ + echo "Running changelog generator (will take up to a few minutes)" + echo -e "# Changelog\n" > temp \ + && docker run -it --rm ferrarimarco/github-changelog-generator \ + --user feast-dev \ + --project feast \ + --release-branch master \ + --future-release v$MAJOR.$MINOR.$PATCH \ + --unreleased-only \ + --no-issues \ + --bug-labels kind/bug \ + --enhancement-labels kind/feature \ + --breaking-labels compat/breaking \ + -t $GH_TOKEN \ + --max-issues 1 -o \ + | sed -n '/## \[v'"$MAJOR.$MINOR.$PATCH"'\]/,$p' \ + | sed '$d' | sed '$d' | sed '$d' | tr -d '\r' >> temp \ + && sed '1d' $CHANGELOG >> temp && mv temp $CHANGELOG + git diff $CHANGELOG + echo "Check CHANGELOG.md carefully and fix any errors. In particular, make sure the new enhancements/PRs/bugfixes aren't already listed somewhere lower down in the file." + read -p "Once you're done checking, continue to commit the changelog (y) or exit (n)? 
" choice + case "$choice" in + y|Y ) commit_changelog ;; + * ) exit ;; + esac ; +} +echo "Step 2: Updating CHANGELOG.md" +if grep -q "https://github.com/feast-dev/feast/tree/v$MAJOR.$MINOR.$PATCH" $CHANGELOG ; then + read -p "CHANGELOG.md appears updated. Skip this step (y/n)? " choice + case "$choice" in + y|Y ) echo "Skipping this step" ;; + * ) update_changelog ;; + esac ; +else + update_changelog ; +fi + +tag_commit() +{ + echo "Tagging commit" + git tag v$MAJOR.$MINOR.$PATCH + echo "Commit tagged" +} +echo "Step 3: Tag commit" +if git tag | grep -q "v$MAJOR.$MINOR.$PATCH" ; then + read -p "The tag already exists. Skip this step (y/n)? " choice + case "$choice" in + y|Y ) echo "Skipping this step" ;; + * ) tag_commit ;; + esac ; +else + tag_commit ; +fi + +echo "Step 4: Push commits and tags" +push_commits() +{ + echo "Pushing commits" + git push $REMOTE v$MAJOR.$MINOR-branch + echo "Commits pushed" +} +echo "Step 4a: Push commits" +if git status | grep -q "nothing to commit" ; then + echo "The commits appear pushed. Skipping this sub-step" +else + read -p "Commits are not pushed. Continue (y) or skip this sub-step (n)? " choice + case "$choice" in + y|Y ) push_commits ;; + * ) echo "Skipping this sub-step" ;; + esac ; +fi + +push_tag() +{ + echo "Pushing tag" + git push $REMOTE v$MAJOR.$MINOR.$PATCH + echo "Tag pushed" +} +echo "Step 4b: Push tag" +if git ls-remote --tags $REMOTE | grep -q "v$MAJOR.$MINOR.$PATCH" ; then + read -p "The tag appears pushed. Skip this sub-step (y/n)? " choice + case "$choice" in + y|Y ) echo "Skipping this sub-step" ;; + * ) push_tag ;; + esac ; +else + read -p "The tag is not pushed. Continue (y) or skip this sub-step (n)? " choice + case "$choice" in + y|Y ) push_tag ;; + * ) echo "Skipping this sub-step" ;; + esac ; +fi + +read -p "Now wait for the CI to pass. Continue (y) or exit and fix the problem (n)? " choice +case "$choice" in + y|Y ) echo "Moving on to the next step" ;; + * ) exit ;; +esac ; + +echo "Step 6: Add changelog to master" +changelog_hash=$(git rev-parse HEAD) +git checkout master +cp_changelog() +{ + echo "Cherry-picking" + git cherry-pick $changelog_hash + echo "Cherry-pick done" +} +echo "Step 6a: Cherry-pick changelog to master" +if grep -q "https://github.com/feast-dev/feast/tree/v$MAJOR.$MINOR.$PATCH" $CHANGELOG ; then + read -p "The changelog appears to be cherry-picked onto master. Skip this sub-step (y/n)? " choice + case "$choice" in + y|Y ) echo "Skipping this sub-step" ;; + * ) cp_changelog ;; + esac ; +else + read -p "The changelog does not appear to be cherry-picked onto master. Continue (y) or skip this sub-step (n)? " choice + case "$choice" in + y|Y ) cp_changelog ;; + * ) echo "Skipping this sub-step" ;; + esac ; +fi + +push_cp() +{ + echo "Pushing cherry-pick" + git push $REMOTE master + echo "Commit pushed" +} +echo "Step 6b: Push changelog to master" +if git status | grep -q "nothing to commit" ; then + echo "The commit appears pushed. Skipping this sub-step" +else + read -p "The commit is not pushed. Continue (y) or skip this sub-step (n)? 
" choice + case "$choice" in + y|Y ) push_cp ;; + * ) echo "Skipping this sub-step" ;; + esac ; +fi + +create_release() +{ + echo "Creating GitHub release" + cat $CHANGELOG | sed -n '/## \[v'"$MAJOR.$MINOR.$PATCH"'\]/,/## \[v'"$MAJOR.$MINOR.$(($PATCH-1))"'\]/p' | sed -n '/**Implemented enhancements/,$p' | sed '$d' > temp2 \ + && gh release create v$MAJOR.$MINOR.$PATCH -t "Feast v$MAJOR.$MINOR.$PATCH" --repo feast-dev/feast --notes-file temp2 \ + && rm temp2 + echo "GitHub release created" +} +echo "Step 7: Create a GitHub release" +if gh release list --repo feast-dev/feast | grep -q "v$MAJOR.$MINOR.$PATCH" ; then + read -p "GitHub release appears created. Skip this step (y/n)? " choice + case "$choice" in + y|Y ) echo "Skipping this step" ;; + * ) create_release ;; + esac ; +else + read -p "A GitHub release has not been created. Continue (y) or skip this step (n)? " choice + case "$choice" in + y|Y ) create_release ;; + * ) echo "Skipping this step" ;; + esac ; +fi + +echo "Step 8: Update the Upgrade Guide manually (docs/advanced/upgrading.md)" diff --git a/infra/scripts/release/bump_file_versions.py b/infra/scripts/release/bump_file_versions.py new file mode 100644 index 0000000000..81ab1329f6 --- /dev/null +++ b/infra/scripts/release/bump_file_versions.py @@ -0,0 +1,102 @@ +# This script will bump the versions found in files (charts, pom.xml) during the Feast release process. + +import pathlib +import sys + +USAGE = f"Usage: python {sys.argv[0]} [--help] | current_semver_version new_semver_version]" +VERSIONS_TO_BUMP = 28 + + +def main() -> None: + args = sys.argv[1:] + if not args or len(args) != 2: + raise SystemExit(USAGE) + + current_version = args[0].strip() + new_version = args[1].strip() + + if current_version == new_version: + raise SystemExit(f"Current and new versions are the same: {current_version} == {new_version}") + + # Validate that the input arguments are semver versions + if not is_semantic_version(current_version): + raise SystemExit(f"Current version is not a valid semantic version: {current_version}") + + if not is_semantic_version(new_version): + raise SystemExit(f"New version is not a valid semantic version: {new_version}") + + # Get git repo root directory + repo_root = pathlib.Path(__file__).resolve().parent.parent.parent.parent + path_to_file_list = repo_root.joinpath("infra", "scripts", "release", "files_to_bump.txt") + + # Get files to bump versions within + with open(path_to_file_list, "r") as f: + files_to_bump = f.read().splitlines() + + # The current version should be 0.18.0 or 0.19.0 or 0.20.0 etc, but we should also make sure to support the + # occasional patch release on the master branch like 0.18.1 or 0.18.2 + versions_in_files = 0 + if current_version[-2:] != ".0": + print(current_version[-2:]) + versions_in_files = count_version(current_version, files_to_bump, repo_root) + # if versions_in_files != VERSIONS_TO_BUMP: + # raise SystemExit(f"Found {versions_in_files} occurrences of {current_version} in files to bump, but " + # f"expected {VERSIONS_TO_BUMP}") + else: + found = False + + # Lets make sure the files don't contain a patch version (e.g, 0.x.0 -> 0.x.20) + for patch_version in range(0, 20): + current_version_patch = current_version[:-1] + str(patch_version) + versions_in_files = count_version(current_version_patch, files_to_bump, repo_root) + + # We are using a patch version, let's change our version number + if versions_in_files == VERSIONS_TO_BUMP: + print(f"Found {versions_in_files} occurrences of {current_version_patch}, changing current 
version to " + f"{current_version_patch}") + current_version = current_version_patch + found = True + break + else: + print(f"Found {versions_in_files} occurrences of {current_version_patch}, instead of {VERSIONS_TO_BUMP}") + if not found: + raise SystemExit(f"Could not find {VERSIONS_TO_BUMP} versions of {current_version} in {path_to_file_list}") + + print(f"Found {versions_in_files} occurrences of {current_version} in files to bump {path_to_file_list}") + + # Bump the version in the files + updated_count = 0 + for file in files_to_bump: + with open(repo_root.joinpath(file), "r") as f: + file_contents = f.read() + file_contents = file_contents.replace(current_version, new_version) + + with open(repo_root.joinpath(file), "w") as f: + f.write(file_contents) + updated_count += 1 + + print(f"Updated {updated_count} files with new version {new_version}") + + +def is_semantic_version(version: str) -> bool: + components = version.split(".") + if len(components) != 3: + return False + for component in components: + if not component.isdigit(): + return False + return True + + +def count_version(current_version, files_to_bump, repo_root): + # Count how many of the existing versions we find + total = 0 + for file in files_to_bump: + with open(repo_root.joinpath(file), "r") as f: + file_contents = f.read() + total += file_contents.count(current_version) + return total + + +if __name__ == "__main__": + main() diff --git a/infra/scripts/release/files_to_bump.txt b/infra/scripts/release/files_to_bump.txt new file mode 100644 index 0000000000..2c3eece6be --- /dev/null +++ b/infra/scripts/release/files_to_bump.txt @@ -0,0 +1,13 @@ +infra/charts/feast/requirements.yaml +infra/charts/feast/Chart.yaml +infra/charts/feast/charts/transformation-service/Chart.yaml +infra/charts/feast/charts/transformation-service/README.md +infra/charts/feast/charts/transformation-service/values.yaml +infra/charts/feast/charts/feature-server/Chart.yaml +infra/charts/feast/charts/feature-server/README.md +infra/charts/feast/charts/feature-server/values.yaml +infra/charts/feast/README.md +infra/charts/feast-python-server/Chart.yaml +infra/charts/feast-python-server/README.md +java/pom.xml +ui/package.json \ No newline at end of file diff --git a/infra/scripts/release/unset_prerelease.py b/infra/scripts/release/unset_prerelease.py new file mode 100644 index 0000000000..4a2ba13197 --- /dev/null +++ b/infra/scripts/release/unset_prerelease.py @@ -0,0 +1,70 @@ +# For some reason patch releases with Semantic Release are tagged as "pre-release" on GitHub. This script +# removes the "pre-release" tag from the release. 
+import os +import sys +import requests + +USAGE = f"Usage: python {sys.argv[0]} [--help] | version_being_released (e.g., v0.19.1)]" + + +def get_prerelease_status(version_being_released, token): + url = f"https://api.github.com/repos/feast-dev/feast/releases/tags/v{version_being_released}" + + headers = { + "Content-Type": "application/json", + "Accept": "application/vnd.github.v3+json", + "Authorization": f"Bearer {token}" + } + + response = requests.request("GET", url, headers=headers) + response_json = response.json() + return bool(response_json['prerelease']), response_json['id'] + + +def set_prerelease_status(release_id, status, token): + url = f"https://api.github.com/repos/feast-dev/feast/releases/{release_id}" + + payload = {"prerelease": status} + + headers = { + "Content-Type": "application/json", + "Accept": "application/vnd.github.v3+json", + "Authorization": f"Bearer {token}" + } + + requests.request("PATCH", url, json=payload, headers=headers) + + +def main() -> None: + args = sys.argv[1:] + if not args or len(args) != 1: + raise SystemExit(USAGE) + + version_being_released = args[0].strip() # should look like 0.19.1 (without the v) + + print(f"Disabling prerelease status for {version_being_released}") + + token = os.getenv('GITHUB_TOKEN', default=None) + + if token is None: + raise OSError("GITHUB_TOKEN environmental variable is not set") + + is_prerelease, release_id = get_prerelease_status(version_being_released, token) + + if is_prerelease: + set_prerelease_status(release_id, False, token) + else: + print(f"{version_being_released} is not a pre-release, exiting.") + exit(0) + + is_prerelease, release_id = get_prerelease_status(version_being_released, token) + + if is_prerelease: + import warnings + warnings.warn(f"Failed to unset prerelease status for {version_being_released} release id {release_id}") + else: + print(f"Successfully unset prerelease status for {version_being_released} release id {release_id}") + + +if __name__ == "__main__": + main() diff --git a/infra/scripts/test-end-to-end.sh b/infra/scripts/test-end-to-end.sh index 1c94f9c02d..9f086d0d8c 100755 --- a/infra/scripts/test-end-to-end.sh +++ b/infra/scripts/test-end-to-end.sh @@ -7,7 +7,7 @@ infra/scripts/download-maven-cache.sh --archive-uri ${MAVEN_CACHE} --output-dir apt-get update && apt-get install -y redis-server postgresql libpq-dev make build-java-no-tests REVISION=develop -python -m pip install --upgrade pip setuptools wheel +python -m pip install --upgrade pip setuptools wheel pip-tools make install-python python -m pip install -qr tests/requirements.txt export FEAST_USAGE="False" diff --git a/infra/scripts/test-golang-sdk.sh b/infra/scripts/test-golang-sdk.sh deleted file mode 100755 index 666f6c12d0..0000000000 --- a/infra/scripts/test-golang-sdk.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash - -set -o pipefail - -make lint-go - -cd sdk/go -go test -v 2>&1 | tee /tmp/test_output -TEST_EXIT_CODE=$? 
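Note that `unset_prerelease.py` above never checks HTTP status codes: with a bad token or a missing release, the first symptom is a `KeyError` on `prerelease`. A hardened sketch of the same two GitHub API calls, where the `raise_for_status()` checks are an assumed improvement rather than the script's existing behavior:

```python
# Sketch of the GET + PATCH flow from unset_prerelease.py with explicit HTTP
# error handling added (an assumption, not the script's current behavior).
import os
import requests

def unset_prerelease(version: str, token: str) -> None:
    """`version` is expected without the leading "v", e.g. "0.19.1"."""
    headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"Bearer {token}",
    }
    base = "https://api.github.com/repos/feast-dev/feast/releases"
    resp = requests.get(f"{base}/tags/v{version}", headers=headers)
    resp.raise_for_status()  # fail loudly on 404/401 instead of a KeyError later
    release = resp.json()
    if release["prerelease"]:
        patch = requests.patch(
            f"{base}/{release['id']}", json={"prerelease": False}, headers=headers
        )
        patch.raise_for_status()

if __name__ == "__main__":
    unset_prerelease("0.19.1", os.environ["GITHUB_TOKEN"])
```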
- -# Default artifact location setting in Prow jobs -LOGS_ARTIFACT_PATH=/logs/artifacts - -go get -u github.com/jstemmer/go-junit-report -cat /tmp/test_output | ${GOPATH}/bin/go-junit-report > ${LOGS_ARTIFACT_PATH}/golang-sdk-test-report.xml - -exit ${TEST_EXIT_CODE} \ No newline at end of file diff --git a/infra/scripts/test-integration.sh b/infra/scripts/test-integration.sh index 5e88e0281b..6f50dd1450 100755 --- a/infra/scripts/test-integration.sh +++ b/infra/scripts/test-integration.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -python -m pip install --upgrade pip setuptools wheel +python -m pip install --upgrade pip setuptools wheel pip-tools make install-python python -m pip install -qr tests/requirements.txt diff --git a/infra/scripts/test-core-ingestion.sh b/infra/scripts/test-java-core-ingestion.sh similarity index 82% rename from infra/scripts/test-core-ingestion.sh rename to infra/scripts/test-java-core-ingestion.sh index d3b42926d8..a812edc616 100755 --- a/infra/scripts/test-core-ingestion.sh +++ b/infra/scripts/test-java-core-ingestion.sh @@ -11,9 +11,9 @@ infra/scripts/download-maven-cache.sh \ # Core depends on Ingestion so they are tested together # Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level -mvn --projects core,ingestion --batch-mode --define skipTests=true \ +mvn -f java/pom.xml --projects core,ingestion --batch-mode --define skipTests=true \ --define enforcer.skip=true clean install -mvn --projects core,ingestion --define enforcer.skip=true test +mvn -f java/pom.xml --projects core,ingestion --define enforcer.skip=true test TEST_EXIT_CODE=$? # Default artifact location setting in Prow jobs diff --git a/infra/scripts/test-java-sdk.sh b/infra/scripts/test-java-sdk.sh index 0731b77976..58dc5b1a83 100755 --- a/infra/scripts/test-java-sdk.sh +++ b/infra/scripts/test-java-sdk.sh @@ -1,13 +1,13 @@ #!/usr/bin/env bash # Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level -mvn --projects sdk/java --batch-mode --define skipTests=true \ +mvn -f java/pom.xml --projects sdk/java --batch-mode --define skipTests=true \ --define enforcer.skip=true clean install -mvn --projects sdk/java --define enforcer.skip=true test +mvn -f java/pom.xml --projects sdk/java --define enforcer.skip=true test TEST_EXIT_CODE=$? # Default artifact location setting in Prow jobs LOGS_ARTIFACT_PATH=/logs/artifacts -cp -r sdk/java/target/surefire-reports ${LOGS_ARTIFACT_PATH}/surefire-reports +cp -r java/sdk/java/target/surefire-reports ${LOGS_ARTIFACT_PATH}/surefire-reports exit ${TEST_EXIT_CODE} \ No newline at end of file diff --git a/infra/scripts/test-serving.sh b/infra/scripts/test-java-serving.sh similarity index 83% rename from infra/scripts/test-serving.sh rename to infra/scripts/test-java-serving.sh index ce9dc0a816..755722e7a8 100755 --- a/infra/scripts/test-serving.sh +++ b/infra/scripts/test-java-serving.sh @@ -4,7 +4,7 @@ infra/scripts/download-maven-cache.sh \ --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \ --output-dir /root/ -mvn --batch-mode --also-make --projects serving test +mvn -f java/pom.xml --batch-mode --also-make --projects serving test TEST_EXIT_CODE=$? 
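The `test-java-*.sh` scripts all follow the pattern visible here: run the tests, stash the exit code, copy reports into the Prow artifacts directory, then exit with the stashed code so the report copy never masks a test failure. A hypothetical Python rendering of that pattern (the report and artifact paths are assumptions):

```python
# Hypothetical Python rendering of the capture-then-exit pattern used by the
# test-java-*.sh scripts; the surefire-reports path is an assumption.
import shutil
import subprocess
import sys

result = subprocess.run(
    ["mvn", "-f", "java/pom.xml", "--batch-mode", "--also-make",
     "--projects", "serving", "test"]
)
# Copy reports even when tests fail, so CI always has artifacts to inspect,
# then propagate the original test exit code.
shutil.copytree("java/serving/target/surefire-reports",
                "/logs/artifacts/surefire-reports", dirs_exist_ok=True)
sys.exit(result.returncode)
```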
# Default artifact location setting in Prow jobs diff --git a/infra/scripts/validate-release.sh b/infra/scripts/validate-release.sh new file mode 100755 index 0000000000..3ccc4f4f15 --- /dev/null +++ b/infra/scripts/validate-release.sh @@ -0,0 +1,30 @@ +# This script ensures that we don't accidentally cut the wrong kind of release on master or release branches + +if [ "$#" -ne 2 ] +then + echo "Usage: validate-release.sh [major, minor, patch] branch" + echo "Example: validate-release.sh patch master" + exit 1 +fi + +if [ "$1" = "minor" ]; then + if [ "$2" = "master" ]; then + echo "Releasing a minor version on master, looks good!" + exit 0 + else + echo "Can't release a minor version from a non-master branch! Please confirm the version you are releasing!!" + exit 1 + fi +elif [ "$1" = "patch" ]; then + if [ "$2" = "master" ]; then + echo "Can't release a patch version from master branch! Please confirm the version you are releasing!!" + exit 1 + else + echo "Releasing a patch version from a non-master branch, looks good!" + exit 0 + fi +else + echo "Not sure what kind of release is happening. Please confirm that you are creating a minor release from master + or a patch from a release branch" + exit 1 +fi \ No newline at end of file diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2 new file mode 100644 index 0000000000..9d3b5649ce --- /dev/null +++ b/infra/templates/README.md.jinja2 @@ -0,0 +1,156 @@ +


+
+ +[![unit-tests](https://github.com/feast-dev/feast/actions/workflows/unit_tests.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/unit_tests.yml) +[![integration-tests-and-build](https://github.com/feast-dev/feast/actions/workflows/master_only.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/master_only.yml) +[![java-integration-tests](https://github.com/feast-dev/feast/actions/workflows/java_master_only.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/java_master_only.yml) +[![linter](https://github.com/feast-dev/feast/actions/workflows/linter.yml/badge.svg?branch=master&event=push)](https://github.com/feast-dev/feast/actions/workflows/linter.yml) +[![Docs Latest](https://img.shields.io/badge/docs-latest-blue.svg)](https://docs.feast.dev/) +[![Python API](https://img.shields.io/readthedocs/feast/master?label=Python%20API)](http://rtd.feast.dev/) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue)](https://github.com/feast-dev/feast/blob/master/LICENSE) +[![GitHub Release](https://img.shields.io/github/v/release/feast-dev/feast.svg?style=flat&sort=semver&color=blue)](https://github.com/feast-dev/feast/releases) + +## Overview + +Feast is an open source feature store for machine learning. Feast is the fastest path to productionizing analytic data for model training and online inference. + +Please see our [documentation](https://docs.feast.dev/) for more information about the project. + +## 📐 Architecture +![](docs/assets/feast-marchitecture.png) + +The above architecture is the minimal Feast deployment. Want to run the full Feast on Snowflake/GCP/AWS? Click [here](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws). + +## 🐣 Getting Started + +### 1. Install Feast +```commandline +pip install feast +``` + +### 2. Create a feature repository +```commandline +feast init my_feature_repo +cd my_feature_repo +``` + +### 3. Register your feature definitions and set up your feature store +```commandline +feast apply +``` + +### 4. Explore your data in the web UI (experimental) + +![Web UI](ui/sample.png) + +### 5. Build a training dataset +```python +from feast import FeatureStore +import pandas as pd +from datetime import datetime + +entity_df = pd.DataFrame.from_dict({ + "driver_id": [1001, 1002, 1003, 1004], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + datetime(2021, 4, 12, 15, 1 , 12) + ] +}) + +store = FeatureStore(repo_path=".") + +training_df = store.get_historical_features( + entity_df=entity_df, + features = [ + 'driver_hourly_stats:conv_rate', + 'driver_hourly_stats:acc_rate', + 'driver_hourly_stats:avg_daily_trips' + ], +).to_df() + +print(training_df.head()) + +# Train model +# model = ml.fit(training_df) +``` +```commandline + event_timestamp driver_id conv_rate acc_rate avg_daily_trips +0 2021-04-12 08:12:10+00:00 1002 0.713465 0.597095 531 +1 2021-04-12 10:59:42+00:00 1001 0.072752 0.044344 11 +2 2021-04-12 15:01:12+00:00 1004 0.658182 0.079150 220 +3 2021-04-12 16:40:26+00:00 1003 0.162092 0.309035 959 + +``` + +### 6. Load feature values into your online store +```commandline +CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S") +feast materialize-incremental $CURRENT_TIME +``` + +```commandline +Materializing feature view driver_hourly_stats from 2021-04-14 to 2021-04-15 done! +``` + +### 7. 
Read online features at low latency +```python +from pprint import pprint +from feast import FeatureStore + +store = FeatureStore(repo_path=".") + +feature_vector = store.get_online_features( + features=[ + 'driver_hourly_stats:conv_rate', + 'driver_hourly_stats:acc_rate', + 'driver_hourly_stats:avg_daily_trips' + ], + entity_rows=[{"driver_id": 1001}] +).to_dict() + +pprint(feature_vector) + +# Make prediction +# model.predict(feature_vector) +``` +```json +{ + "driver_id": [1001], + "driver_hourly_stats__conv_rate": [0.49274], + "driver_hourly_stats__acc_rate": [0.92743], + "driver_hourly_stats__avg_daily_trips": [72] +} +``` + +## 📦 Functionality and Roadmap + +{{ roadmap_contents }} + +## 🎓 Important Resources + +Please refer to the official documentation at [Documentation](https://docs.feast.dev/) + * [Quickstart](https://docs.feast.dev/getting-started/quickstart) + * [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview) + * [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) + * [Change Log](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md) + * [Slack (#Feast)](https://slack.feast.dev/) + +## 👋 Contributing +Feast is a community project and is still under active development. Please have a look at our contributing and development guides if you want to contribute to the project: +- [Contribution Process for Feast](https://docs.feast.dev/project/contributing) +- [Development Guide for Feast](https://docs.feast.dev/project/development-guide) +- [Development Guide for the Main Feast Repository](./CONTRIBUTING.md) + +## ✨ Contributors + +Thanks goes to these incredible people: + + + + diff --git a/infra/terraform/azure/helm.tf b/infra/terraform/azure/helm.tf index c1f85a3699..b37b3826e1 100644 --- a/infra/terraform/azure/helm.tf +++ b/infra/terraform/azure/helm.tf @@ -82,7 +82,8 @@ resource "helm_release" "feast" { name = var.name_prefix namespace = var.aks_namespace - chart = "../../charts/feast" + repository = "https://feast-helm-charts.storage.googleapis.com" + chart = "feast" values = [ yamlencode(local.feast_helm_values) diff --git a/java/.gitignore b/java/.gitignore new file mode 100644 index 0000000000..0d767e93ae --- /dev/null +++ b/java/.gitignore @@ -0,0 +1,191 @@ +### Scratch files ### +scratch* + +### Feast UI deprecated folder #### +ui/ + +### Local Environment ### +*local*.env + +### Secret ### +**/service_account.json + +### Gradle ### +**/.gradle +!gradle/wrapper/gradle-wrapper.jar + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr +/out/ + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ + +## Build Files ## +**/temp/ +**/build/ +core/target/ +**/bin/ +dependency-reduced-pom.xml + +# govendor +vendor + +## direnv +.envrc +.direnv + +.terraform/ +*.tfvars + +# python +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +*.prof + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +classes/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ +.vscode + +# .flattened-pom.xml is generated by flatten-maven-plugin. +# This pom should not be committed because it is only used during release / deployment. +.flattened-pom.xml + +sdk/python/docs/html + +# Generated python code +*_pb2.py +*_pb2.pyi +*_pb2_grpc.py + +# VSCode +.bloop +.metals +*.code-workspace diff --git a/java/CONTRIBUTING.md b/java/CONTRIBUTING.md new file mode 100644 index 0000000000..86eacfef41 --- /dev/null +++ b/java/CONTRIBUTING.md @@ -0,0 +1,87 @@ +# Development Guide: feast-java +> The higher level [Development Guide](https://docs.feast.dev/contributing/development-guide) +> gives contributing to Feast codebase as a whole. + +### Overview +This guide is targeted at developers looking to contribute to Feast components in +the feast-java Repository: +- [Feast Serving](#feast-serving) +- [Feast Java Client](#feast-java-client) + +> Don't see the Feast component that you want to contribute to here? +> Check out the [Development Guide](https://docs.feast.dev/contributing/development-guide) +> to learn how Feast components are distributed over multiple repositories. + +#### Common Setup +Common Environment Setup for all feast-java Feast components: + +Ensure following development tools are installed: +- Java SE Development Kit 11 +- Maven 3.6 +- `make` + +#### Code Style +Feast's Java codebase conforms to the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html). + +Automatically format the code to conform the style guide by: + +```sh +# formats all code in the feast-java repository +mvn spotless:apply +``` + +> If you're using IntelliJ, you can import these [code style settings](https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml) +> if you'd like to use the IDE's reformat function. + +#### Project Makefile +The Project Makefile provides useful shorthands for common development tasks: + + +Run all Unit tests: +``` +make test-java +``` + +Run all Integration tests: +``` +make test-java-integration +``` + +Building Docker images for Feast Core & Feast Serving: +``` +make build-docker REGISTRY=gcr.io/kf-feast VERSION=develop +``` + + +#### IDE Setup +If you're using IntelliJ, some additional steps may be needed to make sure IntelliJ autocomplete works as expected. +Specifically, proto-generated code is not indexed by IntelliJ. 
To fix this, navigate to the following window in IntelliJ:
+`Project Structure > Modules > datatypes-java`, and mark the following folders as `Source` directories:
+- target/generated-sources/protobuf/grpc-java
+- target/generated-sources/protobuf/java
+- target/generated-sources/annotations
+
+## Feast Serving
+See the instructions [here](serving/README.md) for development.
+
+## Feast Java Client
+### Environment Setup
+Setting up your development environment for Feast Java SDK:
+1. Complete the feast-java [Common Setup](#common-setup)
+
+> Feast Java Client is a Java client for retrieving features from a running Feast Serving instance.
+> See the [Feast Serving](#feast-serving) section for how to get a Feast Serving instance running.
+
+### Building
+1. Build / Compile Feast Java Client with Maven:
+
+```sh
+mvn package -pl sdk/java --also-make -Dmaven.test.skip=true
+```
+
+### Unit Tests
+Unit tests can be used to verify functionality:
+
+```sh
+mvn package -pl sdk/java test --also-make
+```
diff --git a/java/README.md b/java/README.md
new file mode 100644
index 0000000000..ff5a1b8553
--- /dev/null
+++ b/java/README.md
@@ -0,0 +1,21 @@
+# Feast Java components
+
+### Overview
+
+This repository contains the following Feast components.
+* Feast Serving: A service used to serve the latest feature values to models.
+* Feast Java SDK: A client used to retrieve features from Feast Serving.
+
+### Architecture
+
+Feast Serving has a dependency on an online store (Redis) for retrieving features.
+The process of ingesting data into the online store (Redis) is decoupled from the process of reading from it.
+
+### Contributing
+Guides on Contributing:
+- [Contribution Process for Feast](https://docs.feast.dev/v/master/contributing/contributing)
+- [Development Guide for Feast](https://docs.feast.dev/contributing/development-guide)
+- [Development Guide for feast-java (this repository)](CONTRIBUTING.md)
+
+### Installing using Helm
+Please see the Helm charts in [infra/charts/feast](../infra/charts/feast).
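Because ingestion into Redis is decoupled from serving, the online store that Feast Serving reads from is typically populated with the Python SDK. A minimal sketch, assuming a feature repository in the current directory whose `feature_store.yaml` points at the same Redis instance:

```python
# Minimal sketch: load (materialize) recent feature values into the online
# store that a running Feast Serving (Java) instance reads from. Assumes a
# feature repo in "." configured with the same Redis online store.
from datetime import datetime, timedelta
from feast import FeatureStore

store = FeatureStore(repo_path=".")
store.materialize(
    start_date=datetime.utcnow() - timedelta(days=1),
    end_date=datetime.utcnow(),
)
```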
diff --git a/java/common/pom.xml b/java/common/pom.xml new file mode 100644 index 0000000000..6b580880f1 --- /dev/null +++ b/java/common/pom.xml @@ -0,0 +1,162 @@ + + + + 4.0.0 + + + feast-parent + dev.feast + ${revision} + + + Feast Common + Feast common module with functionality that can be reused + feast-common + + + + dev.feast + feast-datatypes + ${project.version} + compile + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + + org.apache.commons + commons-lang3 + 3.6 + + + + + org.projectlombok + lombok + ${lombok.version} + + + com.google.auto.value + auto-value-annotations + ${auto.value.version} + + + + + com.google.code.gson + gson + ${gson.version} + + + io.gsonfire + gson-fire + ${gson.fire.version} + + + com.fasterxml.jackson.core + jackson-databind + 2.12.6.1 + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + ${jackson.version} + + + + + org.slf4j + slf4j-api + + + org.fluentd + fluent-logger + 0.3.1 + + + + javax.xml.bind + jaxb-api + + + javax.validation + validation-api + + + + + com.google.code.findbugs + jsr305 + 3.0.2 + + + + + org.hamcrest + hamcrest-library + test + ${hamcrest.version} + + + + junit + junit + 4.13.2 + + + org.mockito + mockito-core + ${mockito.version} + test + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + + org.jacoco + jacoco-maven-plugin + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M4 + + -Xms2048m -Xmx2048m -Djdk.net.URLClassPath.disableClassPathURLCheck=true + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + + diff --git a/java/common/src/main/java/feast/common/logging/AuditLogger.java b/java/common/src/main/java/feast/common/logging/AuditLogger.java new file mode 100644 index 0000000000..f3538a794b --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/AuditLogger.java @@ -0,0 +1,168 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.common.logging; + +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import feast.common.logging.config.LoggingProperties; +import feast.common.logging.config.LoggingProperties.AuditLogProperties; +import feast.common.logging.entry.*; +import feast.common.logging.entry.LogResource.ResourceType; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.fluentd.logger.FluentLogger; +import org.slf4j.Marker; +import org.slf4j.MarkerFactory; +import org.slf4j.event.Level; + +@Slf4j +public class AuditLogger { + private static final String FLUENTD_DESTINATION = "fluentd"; + private static final Marker AUDIT_MARKER = MarkerFactory.getMarker("AUDIT_MARK"); + private static FluentLogger fluentLogger; + private static AuditLogProperties properties; + private static String artifact; + private static String version; + + public AuditLogger(LoggingProperties loggingProperties, String artifact, String version) { + // Spring runs this constructor when creating the AuditLogger bean, + // which allows us to populate the AuditLogger class with dependencies. + // This allows us to use the dependencies in the AuditLogger's static methods + AuditLogger.properties = loggingProperties.getAudit(); + AuditLogger.artifact = artifact; + AuditLogger.version = version; + if (AuditLogger.properties.getMessageLogging() != null + && AuditLogger.properties.getMessageLogging().isEnabled()) { + AuditLogger.fluentLogger = + FluentLogger.getLogger( + "feast", + AuditLogger.properties.getMessageLogging().getFluentdHost(), + AuditLogger.properties.getMessageLogging().getFluentdPort()); + } + } + + /** + * Log the handling of a Protobuf message by a service call. + * + * @param level log level + * @param entryBuilder with all fields set except instance. + */ + public static void logMessage(Level level, MessageAuditLogEntry.Builder entryBuilder) { + log(level, entryBuilder.setComponent(artifact).setVersion(version).build()); + } + + /** + * Log an action being taken on a specific resource + * + * @param level describing the severity of the log. + * @param action name of the action being taken on specific resource. + * @param resourceType the type of resource being logged. + * @param resourceId resource specific identifier identifing the instance of the resource. + */ + public static void logAction( + Level level, String action, ResourceType resourceType, String resourceId) { + log( + level, + ActionAuditLogEntry.of( + artifact, version, LogResource.of(resourceType, resourceId), action)); + } + + /** + * Log a transition in state/status in a specific resource. + * + * @param level describing the severity of the log. + * @param status name of end status which the resource transition to. + * @param resourceType the type of resource being logged. + * @param resourceId resource specific identifier identifing the instance of the resource. + */ + public static void logTransition( + Level level, String status, ResourceType resourceType, String resourceId) { + log( + level, + TransitionAuditLogEntry.of( + artifact, version, LogResource.of(resourceType, resourceId), status)); + } + + /** + * Log given {@link AuditLogEntry} at the given logging {@link Level} to the Audit log. + * + * @param level describing the severity of the log. + * @param entry the {@link AuditLogEntry} to push to the audit log. 
+ */ + private static void log(Level level, AuditLogEntry entry) { + // Check if audit logging is of this specific log entry enabled. + if (!properties.isEnabled()) { + return; + } + + // Either forward log to logging layer or log to console + String destination = properties.getMessageLogging().getDestination(); + if (destination.equals(FLUENTD_DESTINATION)) { + if (entry.getKind() == AuditLogEntryKind.MESSAGE) { + Map fluentdLogs = new HashMap<>(); + MessageAuditLogEntry messageAuditLogEntry = (MessageAuditLogEntry) entry; + String releaseName; + + try { + releaseName = + StringUtils.defaultIfEmpty( + System.getenv("RELEASE_NAME"), InetAddress.getLocalHost().getHostAddress()); + } catch (UnknownHostException e) { + releaseName = StringUtils.defaultIfEmpty(System.getenv("RELEASE_NAME"), ""); + } + + fluentdLogs.put("id", messageAuditLogEntry.getId()); + fluentdLogs.put("identity", messageAuditLogEntry.getIdentity()); + fluentdLogs.put("service", messageAuditLogEntry.getService()); + fluentdLogs.put("status_code", messageAuditLogEntry.getStatusCode()); + fluentdLogs.put("method", messageAuditLogEntry.getMethod()); + fluentdLogs.put("release_name", releaseName); + try { + fluentdLogs.put("request", JsonFormat.printer().print(messageAuditLogEntry.getRequest())); + fluentdLogs.put( + "response", JsonFormat.printer().print(messageAuditLogEntry.getResponse())); + } catch (InvalidProtocolBufferException e) { + } + fluentLogger.log("fluentd", fluentdLogs); + } + } else { + // Log event to audit log through enabled formats + String entryJSON = entry.toJSON(); + switch (level) { + case TRACE: + log.trace(AUDIT_MARKER, entryJSON); + break; + case DEBUG: + log.debug(AUDIT_MARKER, entryJSON); + break; + case INFO: + log.info(AUDIT_MARKER, entryJSON); + break; + case WARN: + log.warn(AUDIT_MARKER, entryJSON); + break; + case ERROR: + log.error(AUDIT_MARKER, entryJSON); + break; + } + } + } +} diff --git a/java/common/src/main/java/feast/common/logging/config/LoggingProperties.java b/java/common/src/main/java/feast/common/logging/config/LoggingProperties.java new file mode 100644 index 0000000000..06e62f71af --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/config/LoggingProperties.java @@ -0,0 +1,54 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.logging.config; + +import feast.common.validators.OneOfStrings; +import javax.validation.constraints.NotNull; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class LoggingProperties { + @NotNull private AuditLogProperties audit; + + @Getter + @Setter + public static class AuditLogProperties { + // Whether to enable/disable audit logging entirely. + private boolean enabled; + + private MessageLogging messageLogging; + + @Getter + @Setter + public static class MessageLogging { + // Whether to enable/disable message level (ie request/response) audit logging. 
+ private boolean enabled; + + // Whether to log to console or fluentd + @OneOfStrings({"console", "fluentd"}) + private String destination; + + // fluentD service host for external (request/response) logging. + private String fluentdHost; + + // fluentD service port for external (request/response) logging. + private Integer fluentdPort; + } + } +} diff --git a/java/common/src/main/java/feast/common/logging/entry/ActionAuditLogEntry.java b/java/common/src/main/java/feast/common/logging/entry/ActionAuditLogEntry.java new file mode 100644 index 0000000000..4fdeaee32a --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/ActionAuditLogEntry.java @@ -0,0 +1,44 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.logging.entry; + +import com.google.auto.value.AutoValue; + +/** ActionAuditLogEntry records an action being taken on a specific resource */ +@AutoValue +public abstract class ActionAuditLogEntry extends AuditLogEntry { + /** @return The name of the action taken on the resource. */ + public abstract String getAction(); + + /** @return The target resource of which the action was taken on. */ + public abstract LogResource getResource(); + + /** + * Create an {@link AuditLogEntry} that records an action being taken on a specific resource. + * + * @param component The name of th Feast component producing this {@link AuditLogEntry}. + * @param version The version of Feast producing this {@link AuditLogEntry}. + * @param resource The target resource of which the action was taken on. + * @param action The name of the action being taken on the given resource. + * @return log entry that records an action being taken on a specific resource + */ + public static ActionAuditLogEntry of( + String component, String version, LogResource resource, String action) { + return new AutoValue_ActionAuditLogEntry( + component, version, AuditLogEntryKind.ACTION, action, resource); + } +} diff --git a/java/common/src/main/java/feast/common/logging/entry/AuditLogEntry.java b/java/common/src/main/java/feast/common/logging/entry/AuditLogEntry.java new file mode 100644 index 0000000000..8148c474b0 --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/AuditLogEntry.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.common.logging.entry; + +import com.google.gson.Gson; + +/** + * AuditLogEntry represents a single audit Log Entry. Audit log entry can converted into string with + * {{@link #toString()} for human readable representation. Or structured JSON with {{@link + * #toJSON()} for a machine parsable representation. + */ +public abstract class AuditLogEntry { + /** Declare Log Type to allow external Logging systems to filter out {@link AuditLogEntry} */ + public final String logType = "FeastAuditLogEntry"; + + public final String application = "Feast"; + + /** + * The name of the Feast component producing this {@link AuditLogEntry} + * + * @return the component + */ + public abstract String getComponent(); + + /** + * The version of Feast producing this {@link AuditLogEntry} + * + * @return version + */ + public abstract String getVersion(); + + public abstract AuditLogEntryKind getKind(); + + /** + * Return a structured JSON representation of this {@link AuditLogEntry} + * + * @return structured JSON representation + */ + public String toJSON() { + Gson gson = new Gson(); + return gson.toJson(this); + } +} diff --git a/java/common/src/main/java/feast/common/logging/entry/AuditLogEntryKind.java b/java/common/src/main/java/feast/common/logging/entry/AuditLogEntryKind.java new file mode 100644 index 0000000000..d673f6bdb3 --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/AuditLogEntryKind.java @@ -0,0 +1,24 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.logging.entry; + +/** AuditLogEntryKind lists the various kinds of {@link AuditLogEntry} */ +public enum AuditLogEntryKind { + MESSAGE, + ACTION, + TRANSITION, +} diff --git a/java/common/src/main/java/feast/common/logging/entry/LogResource.java b/java/common/src/main/java/feast/common/logging/entry/LogResource.java new file mode 100644 index 0000000000..1d0345a404 --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/LogResource.java @@ -0,0 +1,39 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.common.logging.entry; + +import com.google.auto.value.AutoValue; + +@AutoValue +/** + * LogResource is used in {@link AuditLogEntry} to reference a specific resource as the subject of + * the log + */ +public abstract class LogResource { + public enum ResourceType { + JOB, + FEATURE_TABLE + } + + public abstract ResourceType getType(); + + public abstract String getId(); + + public static LogResource of(ResourceType type, String id) { + return new AutoValue_LogResource(type, id); + } +} diff --git a/java/common/src/main/java/feast/common/logging/entry/MessageAuditLogEntry.java b/java/common/src/main/java/feast/common/logging/entry/MessageAuditLogEntry.java new file mode 100644 index 0000000000..6e5072f66e --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/MessageAuditLogEntry.java @@ -0,0 +1,123 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.logging.entry; + +import com.google.auto.value.AutoValue; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; +import com.google.protobuf.Empty; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.Message; +import com.google.protobuf.util.JsonFormat; +import io.grpc.Status.Code; +import java.lang.reflect.Type; +import java.util.UUID; + +/** MessageAuditLogEntry records the handling of a Protobuf message by a service call. */ +@AutoValue +public abstract class MessageAuditLogEntry extends AuditLogEntry { + /** @return Id used to identify the service call that the log entry is recording */ + public abstract UUID getId(); + + /** @return The name of the service that was used to handle the service call. */ + public abstract String getService(); + + /** @return The name of the method that was used to handle the service call. */ + public abstract String getMethod(); + + /** + * @return The request Protobuf {@link Message} that was passed to the Service in the service + * call. + */ + public abstract Message getRequest(); + + /** + * @return The response Protobuf {@link Message} that was passed to the Service in the service + * call. May be an {@link Empty} protobuf no request could be collected due to an error. + */ + public abstract Message getResponse(); + + /** + * @return The authenticated identity that was assumed during the handling of the service call. + * For example, the user id or email that identifies the user making the call. Empty if the + * service call is not authenticated. + */ + public abstract String getIdentity(); + + /** @return The result status code of the service call. 
*/ + public abstract Code getStatusCode(); + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setId(UUID id); + + public abstract Builder setComponent(String component); + + public abstract Builder setVersion(String component); + + public abstract Builder setKind(AuditLogEntryKind kind); + + public abstract Builder setService(String name); + + public abstract Builder setMethod(String name); + + public abstract Builder setRequest(Message request); + + public abstract Builder setResponse(Message response); + + public abstract Builder setIdentity(String identity); + + public abstract Builder setStatusCode(Code statusCode); + + public abstract MessageAuditLogEntry build(); + } + + public static MessageAuditLogEntry.Builder newBuilder() { + return new AutoValue_MessageAuditLogEntry.Builder() + .setKind(AuditLogEntryKind.MESSAGE) + .setId(UUID.randomUUID()); + } + + @Override + public String toJSON() { + // GSON requires custom typeadapter (serializer) to convert Protobuf messages to JSON properly + Gson gson = + new GsonBuilder() + .registerTypeAdapter( + Message.class, + new JsonSerializer() { + @Override + public JsonElement serialize( + Message message, Type type, JsonSerializationContext context) { + try { + String messageJSON = JsonFormat.printer().print(message); + return new JsonParser().parse(messageJSON); + } catch (InvalidProtocolBufferException e) { + + throw new RuntimeException( + "Unexpected exception converting Protobuf to JSON", e); + } + } + }) + .create(); + return gson.toJson(this); + } +} diff --git a/java/common/src/main/java/feast/common/logging/entry/TransitionAuditLogEntry.java b/java/common/src/main/java/feast/common/logging/entry/TransitionAuditLogEntry.java new file mode 100644 index 0000000000..224f10e0b5 --- /dev/null +++ b/java/common/src/main/java/feast/common/logging/entry/TransitionAuditLogEntry.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.logging.entry; + +import com.google.auto.value.AutoValue; + +/** TransitionAuditLogEntry records a transition in state/status in a specific resource. */ +@AutoValue +public abstract class TransitionAuditLogEntry extends AuditLogEntry { + /** @return The resource which the state/status transition occured. */ + public abstract LogResource getResource(); + + /** @return The end status with the resource transition to. */ + public abstract String getStatus(); + + /** + * Construct a new {@link AuditLogEntry} to record a transition in state/status in a specific + * resource. + * + * @param component The name of th Feast component producing this {@link AuditLogEntry}. + * @param version The version of Feast producing this {@link AuditLogEntry}. + * @param resource the resource which the transtion occured + * @param status the end status which the resource transitioned to. 
+   * @return log entry to record a transition in state/status in a specific resource
+   */
+  public static TransitionAuditLogEntry of(
+      String component, String version, LogResource resource, String status) {
+    return new AutoValue_TransitionAuditLogEntry(
+        component, version, AuditLogEntryKind.TRANSITION, resource, status);
+  }
+}
diff --git a/java/common/src/main/java/feast/common/logging/interceptors/GrpcMessageInterceptor.java b/java/common/src/main/java/feast/common/logging/interceptors/GrpcMessageInterceptor.java
new file mode 100644
index 0000000000..661642a89a
--- /dev/null
+++ b/java/common/src/main/java/feast/common/logging/interceptors/GrpcMessageInterceptor.java
@@ -0,0 +1,110 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.common.logging.interceptors;
+
+import com.google.protobuf.Empty;
+import com.google.protobuf.Message;
+import feast.common.logging.AuditLogger;
+import feast.common.logging.config.LoggingProperties;
+import feast.common.logging.entry.MessageAuditLogEntry;
+import io.grpc.ForwardingServerCall.SimpleForwardingServerCall;
+import io.grpc.ForwardingServerCallListener.SimpleForwardingServerCallListener;
+import io.grpc.Metadata;
+import io.grpc.ServerCall;
+import io.grpc.ServerCall.Listener;
+import io.grpc.ServerCallHandler;
+import io.grpc.ServerInterceptor;
+import io.grpc.Status;
+import org.slf4j.event.Level;
+
+/**
+ * GrpcMessageInterceptor intercepts gRPC calls to log the handling of gRPC messages to the audit
+ * log. It intercepts the incoming and outgoing messages and logs them to the audit log, together
+ * with the method name and the assumed authenticated identity (if authentication is enabled).
+ * NOTE: GrpcMessageInterceptor assumes that all service calls are unary (i.e. single
+ * request/response).
+ */
+public class GrpcMessageInterceptor implements ServerInterceptor {
+  private LoggingProperties loggingProperties;
+
+  /**
+   * Construct GrpcMessageInterceptor.
+   *
+   * @param loggingProperties properties used to configure logging interceptor.
+   */
+  public GrpcMessageInterceptor(LoggingProperties loggingProperties) {
+    this.loggingProperties = loggingProperties;
+  }
+
+  @Override
+  public <ReqT, RespT> Listener<ReqT> interceptCall(
+      ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {
+    // Disable the message logging interceptor entirely if message logging is disabled.
+    if (!loggingProperties.getAudit().getMessageLogging().isEnabled()) {
+      return next.startCall(call, headers);
+    }
+
+    MessageAuditLogEntry.Builder entryBuilder = MessageAuditLogEntry.newBuilder();
+    // Default the response/request message to an empty proto in the log entry.
+    // The request could be empty when the client closes the connection before sending a request
+    // message.
+    // The response could be unset when the service encounters an error when processing the
+    // service call.
+    entryBuilder.setRequest(Empty.newBuilder().build());
+    entryBuilder.setResponse(Empty.newBuilder().build());
+
+    // Unpack the service & method name from the call.
+    // The full method name is in the format: <package>.<service>/<method>
+    String fullMethodName = call.getMethodDescriptor().getFullMethodName();
+    entryBuilder.setService(
+        fullMethodName.substring(fullMethodName.lastIndexOf(".") + 1, fullMethodName.indexOf("/")));
+    entryBuilder.setMethod(fullMethodName.substring(fullMethodName.indexOf("/") + 1));
+
+    // Attempt to extract the current authenticated identity.
+    entryBuilder.setIdentity("");
+
+    // Register a forwarding call to intercept the outgoing response and log it to the audit log
+    call =
+        new SimpleForwardingServerCall<ReqT, RespT>(call) {
+          @Override
+          public void sendMessage(RespT message) {
+            // 2. Track the response & log entry to audit logger
+            super.sendMessage(message);
+            entryBuilder.setResponse((Message) message);
+          }
+
+          @Override
+          public void close(Status status, Metadata trailers) {
+            super.close(status, trailers);
+            // 3. Log the message log entry to the audit log
+            Level logLevel = (status.isOk()) ? Level.INFO : Level.ERROR;
+            entryBuilder.setStatusCode(status.getCode());
+            AuditLogger.logMessage(logLevel, entryBuilder);
+          }
+        };
+
+    ServerCall.Listener<ReqT> listener = next.startCall(call, headers);
+    return new SimpleForwardingServerCallListener<ReqT>(listener) {
+      @Override
+      // Register listener to intercept incoming request messages and log them to the audit log
+      public void onMessage(ReqT message) {
+        super.onMessage(message);
+        // 1. Track the request.
+        entryBuilder.setRequest((Message) message);
+      }
+    };
+  }
+}
diff --git a/java/common/src/main/java/feast/common/models/Feature.java b/java/common/src/main/java/feast/common/models/Feature.java
new file mode 100644
index 0000000000..340a8cbe69
--- /dev/null
+++ b/java/common/src/main/java/feast/common/models/Feature.java
@@ -0,0 +1,58 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.common.models;
+
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+
+public class Feature {
+
+  /**
+   * Accepts a FeatureReferenceV2 object and returns its reference as a String in the format
+   * "featuretable_name:feature_name".
+   *
+   * @param featureReference {@link FeatureReferenceV2}
+   * @return String format of FeatureReferenceV2
+   */
+  public static String getFeatureReference(FeatureReferenceV2 featureReference) {
+    String ref = featureReference.getFeatureName();
+    if (!featureReference.getFeatureViewName().isEmpty()) {
+      ref = featureReference.getFeatureViewName() + ":" + ref;
+    }
+    return ref;
+  }
+
+  /**
+   * Accepts either a feature reference of the form "featuretable_name:feature_name" or just a
+   * feature name, and returns just the feature name. For example, given either
+   * "driver_hourly_stats:conv_rate" or "conv_rate", "conv_rate" would be returned.
+   *
+   * @param featureReference {String}
+   * @return Base feature name of the feature reference
+   */
+  public static String getFeatureName(String featureReference) {
+    String[] tokens = featureReference.split(":", 2);
+    return tokens[tokens.length - 1];
+  }
+
+  public static FeatureReferenceV2 parseFeatureReference(String featureReference) {
+    String[] tokens = featureReference.split(":", 2);
+    return FeatureReferenceV2.newBuilder()
+        .setFeatureViewName(tokens[0])
+        .setFeatureName(tokens[1])
+        .build();
+  }
+}
diff --git a/java/common/src/main/java/feast/common/validators/OneOfStringValidator.java b/java/common/src/main/java/feast/common/validators/OneOfStringValidator.java
new file mode 100644
index 0000000000..924953a2c4
--- /dev/null
+++ b/java/common/src/main/java/feast/common/validators/OneOfStringValidator.java
@@ -0,0 +1,51 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.common.validators;
+
+import java.util.Arrays;
+import javax.validation.ConstraintValidator;
+import javax.validation.ConstraintValidatorContext;
+
+/** Validates whether a string value is found within a collection. */
+public class OneOfStringValidator implements ConstraintValidator<OneOfStrings, String> {
+
+  /** Values that are permitted for a specific instance of this validator */
+  String[] allowedValues;
+
+  /**
+   * Initialize the OneOfStringValidator with a collection of allowed String values.
+   *
+   * @param constraintAnnotation constraint annotation
+   */
+  @Override
+  public void initialize(OneOfStrings constraintAnnotation) {
+    allowedValues = constraintAnnotation.value();
+  }
+
+  /**
+   * Validates whether a string value is found within the collection defined in the annotation.
+   *
+   * @param value String value that should be validated
+   * @param context Provides contextual data and operation when applying a given constraint
+   *     validator
+   * @return Boolean value indicating whether the string is found within the allowed values.
+   */
+  @Override
+  public boolean isValid(String value, ConstraintValidatorContext context) {
+    return Arrays.asList(allowedValues).contains(value);
+  }
+}
diff --git a/java/common/src/main/java/feast/common/validators/OneOfStrings.java b/java/common/src/main/java/feast/common/validators/OneOfStrings.java
new file mode 100644
index 0000000000..b236f6f1af
--- /dev/null
+++ b/java/common/src/main/java/feast/common/validators/OneOfStrings.java
@@ -0,0 +1,51 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.validators; + +import java.lang.annotation.*; +import javax.validation.Constraint; +import javax.validation.Payload; + +/** + * Annotation for String "one of" validation. Allows for the definition of a collection through an + * annotation. The collection is used to test values defined in the object. + */ +@Target({ + ElementType.METHOD, + ElementType.FIELD, + ElementType.ANNOTATION_TYPE, + ElementType.CONSTRUCTOR, + ElementType.PARAMETER +}) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Constraint(validatedBy = OneOfStringValidator.class) +public @interface OneOfStrings { + /** @return Default error message that is returned if the incorrect value is set */ + String message() default "Field value must be one of the following: {value}"; + + /** @return Allows for the specification of validation groups to which this constraint belongs. */ + Class[] groups() default {}; + + /** + * @return An attribute payload that can be used to assign custom payload objects to a constraint. + */ + Class[] payload() default {}; + + /** @return Default value that is returned if no allowed values are configured */ + String[] value() default {}; +} diff --git a/java/common/src/main/resources/log4j2.xml b/java/common/src/main/resources/log4j2.xml new file mode 100644 index 0000000000..c75c2db13c --- /dev/null +++ b/java/common/src/main/resources/log4j2.xml @@ -0,0 +1,48 @@ + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${hostName} --- [%15.15t] %-40.40c{1.} : %m%n%ex + + + {"time":"%d{yyyy-MM-dd'T'HH:mm:ssXXX}","hostname":"${hostName}","severity":"%p","message":%m}%n%ex + + + + + + + + + + + + + + + + + + + + + + + diff --git a/java/common/src/test/java/feast/common/logging/entry/AuditLogEntryTest.java b/java/common/src/test/java/feast/common/logging/entry/AuditLogEntryTest.java new file mode 100644 index 0000000000..bc3dcbcf74 --- /dev/null +++ b/java/common/src/test/java/feast/common/logging/entry/AuditLogEntryTest.java @@ -0,0 +1,107 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.common.logging.entry; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.protobuf.Timestamp; +import feast.common.logging.entry.LogResource.ResourceType; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.FeatureReferenceV2; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequestV2; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.types.ValueProto.Value; +import io.grpc.Status; +import java.util.Arrays; +import java.util.List; +import org.junit.Test; + +public class AuditLogEntryTest { + public List getTestAuditLogs() { + GetOnlineFeaturesRequestV2 requestSpec = + GetOnlineFeaturesRequestV2.newBuilder() + .addAllFeatures( + Arrays.asList( + FeatureReferenceV2.newBuilder() + .setFeatureViewName("featuretable_1") + .setFeatureName("feature1") + .build(), + FeatureReferenceV2.newBuilder() + .setFeatureViewName("featuretable_1") + .setFeatureName("feature2") + .build())) + .build(); + + GetOnlineFeaturesResponse responseSpec = + GetOnlineFeaturesResponse.newBuilder() + .setMetadata( + ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder() + .setFeatureNames( + ServingAPIProto.FeatureList.newBuilder() + .addAllVal( + Arrays.asList( + "featuretable_1:feature_1", "featuretable_1:feature2")))) + .addAllResults( + Arrays.asList( + GetOnlineFeaturesResponse.FeatureVector.newBuilder() + .addValues(Value.newBuilder().setInt32Val(32).build()) + .addStatuses(ServingAPIProto.FieldStatus.PRESENT) + .addEventTimestamps(Timestamp.newBuilder().build()) + .build(), + GetOnlineFeaturesResponse.FeatureVector.newBuilder() + .addValues(Value.newBuilder().setInt32Val(64).build()) + .addStatuses(ServingAPIProto.FieldStatus.PRESENT) + .addEventTimestamps(Timestamp.newBuilder().build()) + .build())) + .build(); + + return Arrays.asList( + MessageAuditLogEntry.newBuilder() + .setComponent("feast-serving") + .setVersion("0.9") + .setService("ServingService") + .setMethod("getOnlineFeatures") + .setRequest(requestSpec) + .setResponse(responseSpec) + .setStatusCode(Status.OK.getCode()) + .setIdentity("adam@no.such.email") + .build(), + ActionAuditLogEntry.of( + "core", "0.9", LogResource.of(ResourceType.JOB, "kafka-to-redis"), "CREATE"), + TransitionAuditLogEntry.of( + "core", "0.9", LogResource.of(ResourceType.FEATURE_TABLE, "featuretable_1"), "READY")); + } + + @Test + public void shouldReturnJSONRepresentationOfAuditLog() { + for (AuditLogEntry auditLog : getTestAuditLogs()) { + // Check that auditLog's toJSON() returns valid JSON + String logJSON = auditLog.toJSON(); + System.out.println(logJSON); + JsonParser parser = new JsonParser(); + + // check basic fields are present in JSON representation. 
+ JsonObject logObject = parser.parse(logJSON).getAsJsonObject(); + assertThat(logObject.getAsJsonPrimitive("logType").getAsString(), equalTo(auditLog.logType)); + assertThat( + logObject.getAsJsonPrimitive("kind").getAsString(), equalTo(auditLog.getKind().name())); + } + } +} diff --git a/java/common/src/test/java/feast/common/models/FeaturesTest.java b/java/common/src/test/java/feast/common/models/FeaturesTest.java new file mode 100644 index 0000000000..953da61afe --- /dev/null +++ b/java/common/src/test/java/feast/common/models/FeaturesTest.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.common.models; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.IsEqual.equalTo; + +import feast.proto.serving.ServingAPIProto.FeatureReferenceV2; +import org.junit.Before; +import org.junit.Test; + +public class FeaturesTest { + + private FeatureReferenceV2 featureReference; + + @Before + public void setUp() { + featureReference = + FeatureReferenceV2.newBuilder() + .setFeatureViewName("featuretable_1") + .setFeatureName("feature1") + .build(); + } + + @Test + public void shouldReturnFeatureStringRef() { + String actualFeatureStringRef = Feature.getFeatureReference(featureReference); + String expectedFeatureStringRef = "featuretable_1:feature1"; + + assertThat(actualFeatureStringRef, equalTo(expectedFeatureStringRef)); + } +} diff --git a/java/datatypes/README.md b/java/datatypes/README.md new file mode 100644 index 0000000000..7fc355f773 --- /dev/null +++ b/java/datatypes/README.md @@ -0,0 +1,55 @@ +Feast Data Types for Java +========================= + +This module produces Java class files for Feast's data type and gRPC service +definitions, from Protobuf IDL. These are used across Feast components for wire +interchange, contracts, etc. + +End users of Feast will be best served by our Java SDK which adds higher-level +conveniences, but the data types are published independently for custom needs, +without any additional dependencies the SDK may add. + +Dependency Coordinates +---------------------- + +```xml + + dev.feast + datatypes-java + 0.26.2 + +``` + +Use the version corresponding to the Feast release you have deployed in your +environment—see the [Feast release notes] for details. + +[Feast release notes]: ../../CHANGELOG.md + +Using the `.proto` Definitions +------------------------------ + +The `.proto` definitions are packaged as resources within the Maven artifact, +which may be useful to `include` them in dependent Protobuf definitions in a +downstream project, or for other JVM languages to consume from their builds to +generate more idiomatic bindings. + +Google's Gradle plugin, for instance, [can use protos in dependencies][Gradle] +either for `include` or to compile with a different `protoc` plugin than Java. + +[sbt-protoc] offers similar functionality for sbt/Scala. 
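+
+If you only need the pre-generated Java bindings rather than the raw `.proto`
+files, the classes in this artifact can be used directly. A minimal sketch
+(the `driver:conv_rate` reference and class name below are purely
+illustrative):
+
+```java
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+
+public class DataTypesExample {
+  public static void main(String[] args) {
+    // Build a feature reference with the generated Protobuf builder.
+    FeatureReferenceV2 ref =
+        FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("driver")
+            .setFeatureName("conv_rate")
+            .build();
+    System.out.println(ref.getFeatureViewName() + ":" + ref.getFeatureName());
+  }
+}
+```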
+ +[Gradle]: https://github.com/google/protobuf-gradle-plugin#protos-in-dependencies +[sbt-protoc]: https://github.com/thesamet/sbt-protoc + +Releases +-------- + +The module is published to Maven Central upon each release of Feast (since +v0.3.7). + +For developers, the publishing process is automated along with the Java SDK by +[the `publish-java-sdk` build task in Prow][prow task], where you can see how +it works. Artifacts are staged to Sonatype where a maintainer needs to take a +release action for them to go live on Maven Central. + +[prow task]: https://github.com/feast-dev/feast/blob/17e7dca8238aae4dcbf0ff9f0db5d80ef8e035cf/.prow/config.yaml#L166-L192 diff --git a/java/datatypes/pom.xml b/java/datatypes/pom.xml new file mode 100644 index 0000000000..a5c82d4c45 --- /dev/null +++ b/java/datatypes/pom.xml @@ -0,0 +1,127 @@ + + + + 4.0.0 + + Feast Data Types for Java + + Data types and service contracts used throughout Feast components and + their interchanges. These are generated from Protocol Buffers and gRPC + definitions included in the package. + + + 11 + 11 + + feast-datatypes + + + dev.feast + feast-parent + ${revision} + ../ + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + + javax.annotation + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + true + + com.google.protobuf:protoc:${protoc.version}:exe:${os.detected.classifier} + + grpc-java + + io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} + + + + + + compile + compile-custom + test-compile + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + false + + + + + + + + + com.google.guava + guava + ${guava.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + + io.grpc + grpc-core + ${grpc.version} + + + io.grpc + grpc-protobuf + ${grpc.version} + + + io.grpc + grpc-services + ${grpc.version} + + + io.grpc + grpc-stub + ${grpc.version} + + + javax.annotation + javax.annotation-api + + + + diff --git a/java/datatypes/src/main/proto/feast b/java/datatypes/src/main/proto/feast new file mode 120000 index 0000000000..463e4045de --- /dev/null +++ b/java/datatypes/src/main/proto/feast @@ -0,0 +1 @@ +../../../../../protos/feast \ No newline at end of file diff --git a/java/docs/coverage/pom.xml b/java/docs/coverage/pom.xml new file mode 100644 index 0000000000..f6e08909ee --- /dev/null +++ b/java/docs/coverage/pom.xml @@ -0,0 +1,87 @@ + + + + 4.0.0 + + + + + dev.feast + feast-parent + ${revision} + ../.. + + + Feast Coverage Java + feast-coverage + + + true + + + + + dev.feast + feast-storage-api + ${project.version} + + + + dev.feast + feast-storage-connector-redis + ${project.version} + + + + dev.feast + feast-serving + ${project.version} + + + + dev.feast + feast-serving-client + ${project.version} + + + + + + + org.jacoco + jacoco-maven-plugin + + + report-aggregate + prepare-package + + report-aggregate + + + + + + + + diff --git a/java/infra/docker/feature-server/Dockerfile b/java/infra/docker/feature-server/Dockerfile new file mode 100644 index 0000000000..dbd8c91472 --- /dev/null +++ b/java/infra/docker/feature-server/Dockerfile @@ -0,0 +1,52 @@ +# ============================================================ +# Build stage 1: Builder +# ============================================================ + +FROM maven:3.6-jdk-11 as builder + +WORKDIR /build + +COPY java/pom.xml . 
+COPY java/datatypes/pom.xml datatypes/pom.xml +COPY java/common/pom.xml common/pom.xml +COPY java/serving/pom.xml serving/pom.xml +COPY java/storage/api/pom.xml storage/api/pom.xml +COPY java/storage/connectors/pom.xml storage/connectors/pom.xml +COPY java/storage/connectors/redis/pom.xml storage/connectors/redis/pom.xml +COPY java/sdk/pom.xml sdk/pom.xml +COPY java/docs/coverage/pom.xml docs/coverage/pom.xml + +# Setting Maven repository .m2 directory relative to /build folder gives the +# user to optionally use cached repository when building the image by copying +# the existing .m2 directory to $FEAST_REPO_ROOT/.m2 +ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=3" +COPY java/pom.xml .m2/* .m2/ +RUN mvn dependency:go-offline -DexcludeGroupIds:dev.feast 2>/dev/null || true + +COPY java/ . +COPY protos/feast datatypes/src/main/proto/feast + +ARG VERSION=dev +RUN mvn --also-make --projects serving -Drevision=$VERSION \ + -DskipUTs=true --batch-mode clean package +# +# Download grpc_health_probe to run health check for Feast Serving +# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ +# +RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ + -O /usr/bin/grpc-health-probe && \ + chmod +x /usr/bin/grpc-health-probe + +# ============================================================ +# Build stage 2: Production +# ============================================================ + +FROM amazoncorretto:11 as production +ARG VERSION=dev +COPY --from=builder /build/serving/target/feast-serving-$VERSION-jar-with-dependencies.jar /opt/feast/feast-serving.jar +COPY --from=builder /usr/bin/grpc-health-probe /usr/bin/grpc-health-probe +CMD ["java",\ + "-Xms1g",\ + "-Xmx4g",\ + "-jar",\ + "/opt/feast/feast-serving.jar"] diff --git a/java/infra/docker/feature-server/Dockerfile.dev b/java/infra/docker/feature-server/Dockerfile.dev new file mode 100644 index 0000000000..93bbbbb718 --- /dev/null +++ b/java/infra/docker/feature-server/Dockerfile.dev @@ -0,0 +1,15 @@ +FROM openjdk:11-jre as production +ARG REVISION=dev +# +# Download grpc_health_probe to run health check for Feast Serving +# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ +# +RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ + -O /usr/bin/grpc-health-probe && \ + chmod +x /usr/bin/grpc-health-probe +ADD $PWD/serving/target/feast-serving-$REVISION-exec.jar /opt/feast/feast-serving.jar +CMD ["java",\ + "-Xms1024m",\ + "-Xmx1024m",\ + "-jar",\ + "/opt/feast/feast-serving.jar"] diff --git a/java/pom.xml b/java/pom.xml new file mode 100644 index 0000000000..61a90792ed --- /dev/null +++ b/java/pom.xml @@ -0,0 +1,526 @@ + + + + 4.0.0 + + Feast + Feature Store for Machine Learning + ${github.url} + + dev.feast + feast-parent + ${revision} + pom + + + datatypes + storage/api + storage/connectors + serving + sdk + docs/coverage + common + + + + 0.20.2 + https://github.com/feast-dev/feast + + UTF-8 + UTF-8 + + 1.30.2 + 3.12.2 + 3.16.1 + 1.111.1 + 0.8.0 + 1.9.10 + 1.3 + 2.28.2 + + 0.26.0 + + 2.17.1 + 2.9.9 + 2.0.2 + 1.18.12 + 1.8.4 + 2.8.6 + 1.5.24 + 3.14.7 + 3.10 + 2.12.6 + 2.3.1 + 1.3.2 + 2.0.1.Final + 0.20.2 + 1.6.6 + 29.0-jre + + + + ${maven.multiModuleProjectDirectory} + + false + false + 
feast.common.auth.providers.http.client + + + + Tecton + https://www.tecton.ai + + + + + Feast Authors + ${github.url} + Tecton + https://www.tecton.ai + + + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + ${github.url} + scm:git:${github.url}.git + scm:git:git@github.com:feast-dev/feast.git + HEAD + + + + GitHub Issues + ${github.url}/issues + + + + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4jVersion} + + + org.slf4j + slf4j-api + 1.7.30 + + + + javax.xml.bind + jaxb-api + ${javax.xml.bind.version} + + + javax.annotation + javax.annotation-api + ${javax.annotation.version} + + + javax.validation + validation-api + ${javax.validation.version} + + + + org.junit.platform + junit-platform-engine + 1.8.2 + test + + + org.junit.platform + junit-platform-commons + 1.8.2 + test + + + + + + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + + + + org.apache.maven.plugins + maven-source-plugin + 3.2.1 + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + attach-javadocs + + jar + + + + + io.grpc.*:org.tensorflow.* + + + + com.diffplug.spotless + spotless-maven-plugin + 1.26.1 + + + + ${license.content} + + + 1.7 + + + + src/main/java/**/BatchLoadsWithResult.java + + + + + + ${license.content} + + + + + + + spotless-check + process-test-classes + + check + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 11 + 11 + 11 + + + com.google.auto.value + auto-value + ${auto.value.version} + + + org.projectlombok + lombok + ${lombok.version} + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + 3.0.0-M2 + + + org.codehaus.mojo + extra-enforcer-rules + 1.2 + + + + + valid-build-environment + + enforce + + + + + [3.6,4.0) + + + [1.8.0,) + + + + + + + consistent-dependency-versions + + enforce + + + + + + + + + no-snapshot-deps-at-release + + enforce + + + + + true + + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M5 + + @{argLine} -Xms2048m -Xmx2048m -Djdk.net.URLClassPath.disableClassPathURLCheck=true + ${skipUTs} + + + + org.junit.vintage + junit-vintage-engine + 5.6.3 + + + + + maven-failsafe-plugin + 3.0.0-M5 + + + org.junit.jupiter + junit-jupiter-engine + 5.6.2 + + + + + integration-tests + + integration-test + verify + + + + + + ${project.groupId}:${project.artifactId} + + + ${project.build.outputDirectory} + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + true + + ossrh + https://oss.sonatype.org/ + + true + true + + + + + org.codehaus.mojo + flatten-maven-plugin + 1.1.0 + + oss + + + + flatten + process-resources + + flatten + + + + flatten.clean + clean + + clean + + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + + --pinentry-mode + loopback + + + + ${gpg.passphrase} + + + + + + + + + + io.fabric8 + docker-maven-plugin + 0.20.2 + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + + -Xlint:all + + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.1.1 + + + + org.apache.maven.shared + maven-dependency-analyzer + 1.11.1 + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.1.1 + + all + + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + false + + + + org.jacoco + jacoco-maven-plugin + 0.8.5 + + + + prepare-agent + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + 
0.6.1 + + + + + diff --git a/java/sdk/pom.xml b/java/sdk/pom.xml new file mode 100644 index 0000000000..5896214b27 --- /dev/null +++ b/java/sdk/pom.xml @@ -0,0 +1,155 @@ + + + 4.0.0 + + Feast SDK for Java + SDK for registering, storing, and retrieving features + feast-serving-client + + + dev.feast + feast-parent + ${revision} + ../ + + + + + 5.5.2 + 2.28.2 + 0.33.0 + + + + + dev.feast + feast-datatypes + ${project.version} + + + + + io.grpc + grpc-netty-shaded + ${grpc.version} + + + io.grpc + grpc-protobuf + ${grpc.version} + + + io.grpc + grpc-stub + ${grpc.version} + + + io.grpc + grpc-testing + ${grpc.version} + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + com.google.auto.value + auto-value-annotations + ${auto.value.version} + + + + + io.opentracing.contrib + opentracing-grpc + 0.2.3 + + + io.opentracing + opentracing-api + ${opentracing.version} + + + io.opentracing + opentracing-noop + ${opentracing.version} + + + + + org.slf4j + slf4j-api + + + + + org.junit.jupiter + junit-jupiter-engine + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-api + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit.version} + test + + + org.apache.commons + commons-lang3 + 3.6 + compile + + + org.mockito + mockito-core + ${mockito.version} + test + + + org.mockito + mockito-inline + ${mockito.version} + test + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + + + + org.jacoco + jacoco-maven-plugin + + + org.sonatype.plugins + nexus-staging-maven-plugin + + false + + + + + + diff --git a/java/sdk/src/main/java/dev/feast/FeastClient.java b/java/sdk/src/main/java/dev/feast/FeastClient.java new file mode 100644 index 0000000000..c10a76ecf8 --- /dev/null +++ b/java/sdk/src/main/java/dev/feast/FeastClient.java @@ -0,0 +1,224 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package dev.feast;
+
+import com.google.common.collect.Lists;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest;
+import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse;
+import feast.proto.serving.ServingServiceGrpc;
+import feast.proto.serving.ServingServiceGrpc.ServingServiceBlockingStub;
+import feast.proto.types.ValueProto;
+import io.grpc.CallCredentials;
+import io.grpc.ManagedChannel;
+import io.grpc.ManagedChannelBuilder;
+import io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts;
+import io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder;
+import io.opentracing.contrib.grpc.TracingClientInterceptor;
+import io.opentracing.util.GlobalTracer;
+import java.io.File;
+import java.time.Instant;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import javax.net.ssl.SSLException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings("WeakerAccess")
+public class FeastClient implements AutoCloseable {
+  Logger logger = LoggerFactory.getLogger(FeastClient.class);
+
+  private static final int CHANNEL_SHUTDOWN_TIMEOUT_SEC = 5;
+
+  private final ManagedChannel channel;
+  private final ServingServiceBlockingStub stub;
+
+  /**
+   * Create a client to access Feast Serving.
+   *
+   * @param host hostname or ip address of Feast Serving gRPC server
+   * @param port port number of Feast Serving gRPC server
+   * @return {@link FeastClient}
+   */
+  public static FeastClient create(String host, int port) {
+    // configure client with no security config.
+    return FeastClient.createSecure(host, port, SecurityConfig.newBuilder().build());
+  }
+
+  /**
+   * Create an authenticated client that can access Feast Serving with authentication enabled.
+   *
+   * @param host hostname or ip address of Feast Serving gRPC server
+   * @param port port number of Feast Serving gRPC server
+   * @param securityConfig security options to configure the Feast client. See {@link
+   *     SecurityConfig} for options.
+   * @return {@link FeastClient}
+   */
+  public static FeastClient createSecure(String host, int port, SecurityConfig securityConfig) {
+    // Configure client TLS
+    ManagedChannel channel = null;
+    if (securityConfig.isTLSEnabled()) {
+      if (securityConfig.getCertificatePath().isPresent()) {
+        String certificatePath = securityConfig.getCertificatePath().get();
+        // Use custom certificate for TLS
+        File certificateFile = new File(certificatePath);
+        try {
+          channel =
+              NettyChannelBuilder.forAddress(host, port)
+                  .useTransportSecurity()
+                  .sslContext(GrpcSslContexts.forClient().trustManager(certificateFile).build())
+                  .build();
+        } catch (SSLException e) {
+          throw new IllegalArgumentException(
+              String.format("Invalid Certificate provided at path: %s", certificatePath), e);
+        }
+      } else {
+        // Use system certificates for TLS
+        channel = ManagedChannelBuilder.forAddress(host, port).useTransportSecurity().build();
+      }
+    } else {
+      // Disable TLS
+      channel = ManagedChannelBuilder.forAddress(host, port).usePlaintext().build();
+    }
+
+    return new FeastClient(channel, securityConfig.getCredentials());
+  }
+
+  /**
+   * Obtain info about Feast Serving.
+   *
+   * @return {@link GetFeastServingInfoResponse} containing Feast version, serving type, etc.
+   */
+  public GetFeastServingInfoResponse getFeastServingInfo() {
+    return stub.getFeastServingInfo(GetFeastServingInfoRequest.newBuilder().build());
+  }
+
+  /**
+   * Get online features from Feast, without specifying a project; the `default` project is used.
+   *
+   * <p>See {@link #getOnlineFeatures(List, List, String)}
+   *
+   * @param featureRefs list of string feature references to retrieve in the following format
+   *     featureTable:feature, where 'featureTable' and 'feature' refer to the FeatureTable and
+   *     Feature names respectively. Only the Feature name is required.
+   * @param entities list of {@link Row} to select the entities to retrieve the features for.
+   * @return list of {@link Row} containing retrieved data fields.
+   */
+  public List<Row> getOnlineFeatures(List<String> featureRefs, List<Row> entities) {
+    GetOnlineFeaturesRequest.Builder requestBuilder = GetOnlineFeaturesRequest.newBuilder();
+
+    requestBuilder.setFeatures(
+        ServingAPIProto.FeatureList.newBuilder().addAllVal(featureRefs).build());
+
+    requestBuilder.putAllEntities(getEntityValuesMap(entities));
+
+    GetOnlineFeaturesResponse response = stub.getOnlineFeatures(requestBuilder.build());
+
+    List<Row> results = Lists.newArrayList();
+    if (response.getResultsCount() == 0) {
+      return results;
+    }
+
+    for (int rowIdx = 0; rowIdx < response.getResults(0).getValuesCount(); rowIdx++) {
+      Row row = Row.create();
+      for (int featureIdx = 0; featureIdx < response.getResultsCount(); featureIdx++) {
+        row.set(
+            response.getMetadata().getFeatureNames().getVal(featureIdx),
+            response.getResults(featureIdx).getValues(rowIdx),
+            response.getResults(featureIdx).getStatuses(rowIdx));
+
+        row.setEntityTimestamp(
+            Instant.ofEpochSecond(
+                response.getResults(featureIdx).getEventTimestamps(rowIdx).getSeconds()));
+      }
+      for (Map.Entry<String, ValueProto.Value> entry :
+          entities.get(rowIdx).getFields().entrySet()) {
+        row.set(entry.getKey(), entry.getValue());
+      }
+
+      results.add(row);
+    }
+    return results;
+  }
+
+  private Map<String, ValueProto.RepeatedValue> getEntityValuesMap(List<Row> entities) {
+    Map<String, ValueProto.RepeatedValue.Builder> columnarEntities = new HashMap<>();
+    for (Row row : entities) {
+      for (Map.Entry<String, ValueProto.Value> field : row.getFields().entrySet()) {
+        if (!columnarEntities.containsKey(field.getKey())) {
+          columnarEntities.put(field.getKey(), ValueProto.RepeatedValue.newBuilder());
+        }
+        columnarEntities.get(field.getKey()).addVal(field.getValue());
+      }
+    }
+
+    return columnarEntities.entrySet().stream()
+        .map((e) -> Map.entry(e.getKey(), e.getValue().build()))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+  }
+
+  /**
+   * Get online features from Feast.
+   *
+   * <p>Example of retrieving online features for the driver FeatureTable, with features driver_id
+   * and driver_name
+   *
+   * <pre>{@code
+   * FeastClient client = FeastClient.create("localhost", 6566);
+   * List<String> requestedFeatureIds = Arrays.asList("driver:driver_id", "driver:driver_name");
+   * List<Row> requestedRows =
+   *     Arrays.asList(Row.create().set("driver_id", 123), Row.create().set("driver_id", 456));
+   * List<Row> retrievedFeatures = client.getOnlineFeatures(requestedFeatureIds, requestedRows);
+   * retrievedFeatures.forEach(System.out::println);
+   * }</pre>
+   *
+   * @param featureRefs list of string feature references to retrieve in the following format
+   *     featureTable:feature, where 'featureTable' and 'feature' refer to the FeatureTable and
+   *     Feature names respectively. Only the Feature name is required.
+   * @param rows list of {@link Row} to select the entities to retrieve the features for
+   * @param project {@link String} Specifies the project override. If specified, the project is
+   *     used for retrieval. Overrides the projects set in Feature References if also specified.
+   * @return list of {@link Row} containing retrieved data fields.
+   */
+  public List<Row> getOnlineFeatures(List<String> featureRefs, List<Row> rows, String project) {
+    return getOnlineFeatures(featureRefs, rows);
+  }
+
+  protected FeastClient(ManagedChannel channel, Optional<CallCredentials> credentials) {
+    this.channel = channel;
+    TracingClientInterceptor tracingInterceptor =
+        TracingClientInterceptor.newBuilder().withTracer(GlobalTracer.get()).build();
+
+    ServingServiceBlockingStub servingStub =
+        ServingServiceGrpc.newBlockingStub(tracingInterceptor.intercept(channel));
+
+    if (credentials.isPresent()) {
+      servingStub = servingStub.withCallCredentials(credentials.get());
+    }
+
+    this.stub = servingStub;
+  }
+
+  public void close() throws Exception {
+    if (channel != null) {
+      channel.shutdown().awaitTermination(CHANNEL_SHUTDOWN_TIMEOUT_SEC, TimeUnit.SECONDS);
+    }
+  }
+}
diff --git a/java/sdk/src/main/java/dev/feast/RequestUtil.java b/java/sdk/src/main/java/dev/feast/RequestUtil.java
new file mode 100644
index 0000000000..fc13c45311
--- /dev/null
+++ b/java/sdk/src/main/java/dev/feast/RequestUtil.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package dev.feast;
+
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@SuppressWarnings("WeakerAccess")
+public class RequestUtil {
+
+  /**
+   * Create feature reference protos from the given string feature references.
+   *
+   * @param featureRefStrings string feature references to create FeatureReferenceV2 protos from
+   * @return List of parsed {@link FeatureReferenceV2} protos
+   */
+  public static List<FeatureReferenceV2> createFeatureRefs(List<String> featureRefStrings) {
+    if (featureRefStrings == null) {
+      throw new IllegalArgumentException("FeatureReferences cannot be null");
+    }
+
+    List<FeatureReferenceV2> featureRefs =
+        featureRefStrings.stream()
+            .map(refStr -> parseFeatureRef(refStr))
+            .collect(Collectors.toList());
+
+    return featureRefs;
+  }
+
+  /**
+   * Parse a feature reference proto from the given feature reference string.
+   *
+   * @param featureRefString string feature reference to parse from.
+   * @return a parsed {@link FeatureReferenceV2}
+   */
+  public static FeatureReferenceV2 parseFeatureRef(String featureRefString) {
+    featureRefString = featureRefString.trim();
+    if (featureRefString.isEmpty()) {
+      throw new IllegalArgumentException("Cannot parse an empty feature reference");
+    }
+    if (featureRefString.contains("/")) {
+      throw new IllegalArgumentException(
+          String.format(
+              "Unsupported feature reference: Specifying project in string"
+                  + " Feature References is no longer supported: %s",
+              featureRefString));
+    }
+    if (!featureRefString.contains(":")) {
+      throw new IllegalArgumentException(
+          String.format(
+              "Unsupported feature reference: %s - FeatureTable name and Feature name should be provided in string"
+                  + " Feature References, in <featuretable_name>:<feature_name> format.",
+              featureRefString));
+    }
+
+    String[] featureReferenceParts = featureRefString.split(":");
+    FeatureReferenceV2 featureRef =
+        FeatureReferenceV2.newBuilder()
+            .setFeatureViewName(featureReferenceParts[0])
+            .setFeatureName(featureReferenceParts[1])
+            .build();
+
+    return featureRef;
+  }
+}
diff --git a/java/sdk/src/main/java/dev/feast/Row.java b/java/sdk/src/main/java/dev/feast/Row.java
new file mode 100644
index 0000000000..308daa5a2f
--- /dev/null
+++ b/java/sdk/src/main/java/dev/feast/Row.java
@@ -0,0 +1,161 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package dev.feast;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.util.Timestamps;
+import feast.proto.serving.ServingAPIProto.FieldStatus;
+import feast.proto.types.ValueProto.Value;
+import feast.proto.types.ValueProto.Value.ValCase;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+@SuppressWarnings("UnusedReturnValue")
+public class Row {
+  private Timestamp entity_timestamp;
+  private HashMap<String, Value> fields;
+  private HashMap<String, FieldStatus> fieldStatuses;
+
+  public static Row create() {
+    Row row = new Row();
+    row.entity_timestamp = Timestamps.fromMillis(System.currentTimeMillis());
+    row.fields = new HashMap<>();
+    row.fieldStatuses = new HashMap<>();
+    return row;
+  }
+
+  public Row setEntityTimestamp(Instant timestamp) {
+    entity_timestamp = Timestamps.fromMillis(timestamp.toEpochMilli());
+    return this;
+  }
+
+  public Timestamp getEntityTimestamp() {
+    return entity_timestamp;
+  }
+
+  public Row setEntityTimestamp(String dateTime) {
+    entity_timestamp = Timestamps.fromMillis(Instant.parse(dateTime).toEpochMilli());
+    return this;
+  }
+
+  public Row set(String fieldName, Object value) {
+    return this.set(fieldName, value, FieldStatus.PRESENT);
+  }
+
+  public Row set(String fieldName, Object value, FieldStatus status) {
+    String valueType = value.getClass().getCanonicalName();
+    switch (valueType) {
+      case "java.lang.Integer":
+        fields.put(fieldName, Value.newBuilder().setInt32Val((int) value).build());
+        break;
+      case "java.lang.Long":
+        fields.put(fieldName, Value.newBuilder().setInt64Val((long) value).build());
+        break;
+      case "java.lang.Float":
+        fields.put(fieldName, Value.newBuilder().setFloatVal((float) value).build());
+        break;
+      case "java.lang.Double":
+        fields.put(fieldName, Value.newBuilder().setDoubleVal((double) value).build());
+        break;
+      case "java.lang.String":
+        fields.put(fieldName, Value.newBuilder().setStringVal((String) value).build());
+        break;
+      case "byte[]":
+        fields.put(
+            fieldName, Value.newBuilder().setBytesVal(ByteString.copyFrom((byte[]) value)).build());
+        break;
+      case "feast.proto.types.ValueProto.Value":
+        fields.put(fieldName, (Value) value);
+        break;
+      default:
+        throw new IllegalArgumentException(
+            String.format(
+                "Type '%s' is unsupported in Feast. Please use one of these value types: Integer, Long, Float, Double, String, byte[].",
+                valueType));
+    }
+
+    fieldStatuses.put(fieldName, status);
+    return this;
+  }
+
+  public Map<String, Value> getFields() {
+    return fields;
+  }
+
+  public Integer getInt(String fieldName) {
+    return getValue(fieldName).map(Value::getInt32Val).orElse(null);
+  }
+
+  public Long getLong(String fieldName) {
+    return getValue(fieldName).map(Value::getInt64Val).orElse(null);
+  }
+
+  public Float getFloat(String fieldName) {
+    return getValue(fieldName).map(Value::getFloatVal).orElse(null);
+  }
+
+  public Double getDouble(String fieldName) {
+    return getValue(fieldName).map(Value::getDoubleVal).orElse(null);
+  }
+
+  public String getString(String fieldName) {
+    return getValue(fieldName).map(Value::getStringVal).orElse(null);
+  }
+
+  public byte[] getByte(String fieldName) {
+    return getValue(fieldName).map(Value::getBytesVal).map(ByteString::toByteArray).orElse(null);
+  }
+
+  public Map<String, FieldStatus> getStatuses() {
+    return fieldStatuses;
+  }
+
+  public FieldStatus getStatus(String fieldName) {
+    return fieldStatuses.get(fieldName);
+  }
+
+  @Override
+  public String toString() {
+    List<String> parts = new ArrayList<>();
+    fields.forEach(
+        (key, value) ->
+            parts.add(
+                key
+                    + ":"
+                    + (value.getValCase().equals(ValCase.VAL_NOT_SET)
+                        ? "NULL"
+                        : value.toString().trim())));
+    return String.join(", ", parts);
+  }
+
+  private Optional<Value> getValue(String fieldName) {
+    if (!fields.containsKey(fieldName)) {
+      throw new IllegalArgumentException(
+          String.format("Row does not contain field '%s'", fieldName));
+    }
+    Value value = fields.get(fieldName);
+    if (value.getValCase().equals(ValCase.VAL_NOT_SET)) {
+      return Optional.empty();
+    }
+    return Optional.of(value);
+  }
+}
diff --git a/java/sdk/src/main/java/dev/feast/SecurityConfig.java b/java/sdk/src/main/java/dev/feast/SecurityConfig.java
new file mode 100644
index 0000000000..29acb97631
--- /dev/null
+++ b/java/sdk/src/main/java/dev/feast/SecurityConfig.java
@@ -0,0 +1,66 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package dev.feast;
+
+import com.google.auto.value.AutoValue;
+import io.grpc.CallCredentials;
+import java.util.Optional;
+
+/** SecurityConfig captures the security-related configuration for FeastClient */
+@AutoValue
+public abstract class SecurityConfig {
+  /**
+   * Enables authentication. If specified, the call credentials are used to authenticate with
+   * Feast.
+   *
+   * @return credentials
+   */
+  public abstract Optional<CallCredentials> getCredentials();
+
+  /**
+   * Whether TLS transport security is used when connecting to Feast.
+   *
+   * @return true if enabled
+   */
+  public abstract boolean isTLSEnabled();
+
+  /**
+   * If specified and TLS is enabled, provides the path to the TLS certificate used to verify the
+   * service identity.
+   *
+   * @return certificate path
+   */
+  public abstract Optional<String> getCertificatePath();
+
+  @AutoValue.Builder
+  public abstract static class Builder {
+    public abstract Builder setCredentials(Optional<CallCredentials> credentials);
+
+    public abstract Builder setTLSEnabled(boolean isTLSEnabled);
+
+    public abstract Builder setCertificatePath(Optional<String> certificatePath);
+
+    public abstract SecurityConfig build();
+  }
+
+  public static SecurityConfig.Builder newBuilder() {
+    return new AutoValue_SecurityConfig.Builder()
+        .setCredentials(Optional.empty())
+        .setTLSEnabled(false)
+        .setCertificatePath(Optional.empty());
+  }
+}
diff --git a/java/sdk/src/test/java/dev/feast/FeastClientTest.java b/java/sdk/src/test/java/dev/feast/FeastClientTest.java
new file mode 100644
index 0000000000..1dfb9989c9
--- /dev/null
+++ b/java/sdk/src/test/java/dev/feast/FeastClientTest.java
@@ -0,0 +1,178 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package dev.feast;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.AdditionalAnswers.delegatesTo;
+import static org.mockito.Mockito.mock;
+
+import com.google.protobuf.Timestamp;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.serving.ServingAPIProto.FieldStatus;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse;
+import feast.proto.serving.ServingServiceGrpc.ServingServiceImplBase;
+import feast.proto.types.ValueProto;
+import feast.proto.types.ValueProto.Value;
+import io.grpc.*;
+import io.grpc.inprocess.InProcessChannelBuilder;
+import io.grpc.inprocess.InProcessServerBuilder;
+import io.grpc.stub.StreamObserver;
+import io.grpc.testing.GrpcCleanupRule;
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class FeastClientTest {
+  private final String AUTH_TOKEN = "test token";
+
+  @Rule public GrpcCleanupRule grpcRule;
+  private AtomicBoolean isAuthenticated;
+
+  private ServingServiceImplBase servingMock =
+      mock(
+          ServingServiceImplBase.class,
+          delegatesTo(
+              new ServingServiceImplBase() {
+                @Override
+                public void getOnlineFeatures(
+                    GetOnlineFeaturesRequest request,
+                    StreamObserver<GetOnlineFeaturesResponse> responseObserver) {
+                  if (!request.equals(FeastClientTest.getFakeRequest())) {
+                    responseObserver.onError(Status.FAILED_PRECONDITION.asRuntimeException());
+                  }
+
+                  responseObserver.onNext(FeastClientTest.getFakeResponse());
+                  responseObserver.onCompleted();
+                }
+              }));
+
+  private FeastClient client;
+
+  @Before
+  public void setup() throws Exception {
+    this.grpcRule = new GrpcCleanupRule();
+    this.isAuthenticated = new AtomicBoolean(false);
+    // setup fake serving service
+    String serverName = InProcessServerBuilder.generateName();
+ this.grpcRule.register( + InProcessServerBuilder.forName(serverName) + .directExecutor() + .addService(this.servingMock) + .build() + .start()); + + // setup test feast client target + ManagedChannel channel = + this.grpcRule.register( + InProcessChannelBuilder.forName(serverName).directExecutor().build()); + this.client = new FeastClient(channel, Optional.empty()); + } + + @Test + public void shouldGetOnlineFeatures() { + shouldGetOnlineFeaturesWithClient(this.client); + } + + private void shouldGetOnlineFeaturesWithClient(FeastClient client) { + List rows = + client.getOnlineFeatures( + Arrays.asList("driver:name", "driver:rating", "driver:null_value"), + Arrays.asList( + Row.create().set("driver_id", 1).setEntityTimestamp(Instant.ofEpochSecond(100))), + "driver_project"); + + assertEquals( + rows.get(0).getFields(), + new HashMap() { + { + put("driver_id", intValue(1)); + put("driver:name", strValue("david")); + put("driver:rating", intValue(3)); + put("driver:null_value", Value.newBuilder().build()); + } + }); + assertEquals( + rows.get(0).getStatuses(), + new HashMap() { + { + put("driver_id", FieldStatus.PRESENT); + put("driver:name", FieldStatus.PRESENT); + put("driver:rating", FieldStatus.PRESENT); + put("driver:null_value", FieldStatus.NULL_VALUE); + } + }); + } + + private static GetOnlineFeaturesRequest getFakeRequest() { + // setup mock serving service stub + return GetOnlineFeaturesRequest.newBuilder() + .setFeatures( + ServingAPIProto.FeatureList.newBuilder() + .addVal("driver:name") + .addVal("driver:rating") + .addVal("driver:null_value") + .build()) + .putEntities("driver_id", ValueProto.RepeatedValue.newBuilder().addVal(intValue(1)).build()) + .build(); + } + + private static GetOnlineFeaturesResponse getFakeResponse() { + return GetOnlineFeaturesResponse.newBuilder() + .addResults( + GetOnlineFeaturesResponse.FeatureVector.newBuilder() + .addValues(strValue("david")) + .addStatuses(FieldStatus.PRESENT) + .addEventTimestamps(Timestamp.newBuilder()) + .build()) + .addResults( + GetOnlineFeaturesResponse.FeatureVector.newBuilder() + .addValues(intValue(3)) + .addStatuses(FieldStatus.PRESENT) + .addEventTimestamps(Timestamp.newBuilder()) + .build()) + .addResults( + GetOnlineFeaturesResponse.FeatureVector.newBuilder() + .addValues(Value.newBuilder().build()) + .addStatuses(FieldStatus.NULL_VALUE) + .addEventTimestamps(Timestamp.newBuilder()) + .build()) + .setMetadata( + ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder() + .setFeatureNames( + ServingAPIProto.FeatureList.newBuilder() + .addVal("driver:name") + .addVal("driver:rating") + .addVal("driver:null_value")) + .build()) + .build(); + } + + private static Value strValue(String val) { + return Value.newBuilder().setStringVal(val).build(); + } + + private static Value intValue(int val) { + return Value.newBuilder().setInt32Val(val).build(); + } +} diff --git a/java/sdk/src/test/java/dev/feast/RequestUtilTest.java b/java/sdk/src/test/java/dev/feast/RequestUtilTest.java new file mode 100644 index 0000000000..e5684ecd18 --- /dev/null +++ b/java/sdk/src/test/java/dev/feast/RequestUtilTest.java @@ -0,0 +1,105 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package dev.feast;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import com.google.common.collect.ImmutableList;
+import com.google.protobuf.TextFormat;
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.NullSource;
+
+class RequestUtilTest {
+
+  private static Stream<Arguments> provideValidFeatureRefs() {
+    return Stream.of(
+        Arguments.of(
+            Arrays.asList("driver:driver_id"),
+            Arrays.asList(
+                FeatureReferenceV2.newBuilder()
+                    .setFeatureViewName("driver")
+                    .setFeatureName("driver_id")
+                    .build())));
+  }
+
+  @ParameterizedTest
+  @MethodSource("provideValidFeatureRefs")
+  void createFeatureRefs_ShouldReturnFeaturesForValidFeatureRefs(
+      List<String> input, List<FeatureReferenceV2> expected) {
+    List<FeatureReferenceV2> actual = RequestUtil.createFeatureRefs(input);
+    // The order of the actual and expected feature references does not matter
+    actual.sort(Comparator.comparing(FeatureReferenceV2::getFeatureName));
+    expected.sort(Comparator.comparing(FeatureReferenceV2::getFeatureName));
+    assertEquals(expected.size(), actual.size());
+    for (int i = 0; i < expected.size(); i++) {
+      String expectedString = TextFormat.printer().printToString(expected.get(i));
+      String actualString = TextFormat.printer().printToString(actual.get(i));
+      assertEquals(expectedString, actualString);
+    }
+  }
+
+  @ParameterizedTest
+  @MethodSource("provideValidFeatureRefs")
+  void renderFeatureRef_ShouldReturnFeatureRefString(
+      List<String> expected, List<FeatureReferenceV2> input) {
+    input = input.stream().map(ref -> ref.toBuilder().build()).collect(Collectors.toList());
+    List<String> actual =
+        input.stream()
+            .map(ref -> String.format("%s:%s", ref.getFeatureViewName(), ref.getFeatureName()))
+            .collect(Collectors.toList());
+    assertEquals(expected.size(), actual.size());
+    for (int i = 0; i < expected.size(); i++) {
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
+
+  private static Stream<Arguments> provideInvalidFeatureRefs() {
+    return Stream.of(Arguments.of(ImmutableList.of("project/feature")));
+  }
+
+  private static Stream<Arguments> provideMissingFeatureTableFeatureRefs() {
+    return Stream.of(Arguments.of(ImmutableList.of("feature")));
+  }
+
+  @ParameterizedTest
+  @MethodSource("provideInvalidFeatureRefs")
+  void createFeatureRefs_ShouldThrowExceptionForProjectInFeatureRefs(List<String> input) {
+    assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input));
+  }
+
+  @ParameterizedTest
+  @MethodSource("provideMissingFeatureTableFeatureRefs")
+  void createFeatureRefs_ShouldThrowExceptionForMissingFeatureTableInFeatureRefs(
+      List<String> input) {
+    assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input));
+  }
+
+  @ParameterizedTest
+  @NullSource
+  void createFeatureRefs_ShouldThrowExceptionForNullFeatureRefs(List<String> input) {
+    assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input));
+  }
+}
diff --git a/java/serving/.dockerignore b/java/serving/.dockerignore
new file mode 100644
index 0000000000..441c3d1a50
--- /dev/null
+++ b/java/serving/.dockerignore
@@ -0,0 +1,3 @@
+*/.gradle/
+.idea/
+*/build/
\ No newline at end of file
diff --git a/java/serving/.gitignore b/java/serving/.gitignore
new file mode 100644
index 0000000000..6c6b6d8d8f
--- /dev/null
+++ b/java/serving/.gitignore
@@ -0,0 +1,37 @@
+### Scratch files ###
+scratch*
+
+### Local Environment ###
+*local*.env
+
+### Gradle ###
+.gradle
+**/build/
+!gradle/wrapper/gradle-wrapper.jar
+feast-serving.jar
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+/out/
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+
+## Feast Temporary Files ##
+/temp/
\ No newline at end of file
diff --git a/java/serving/README.md b/java/serving/README.md
new file mode 100644
index 0000000000..5ac7194924
--- /dev/null
+++ b/java/serving/README.md
@@ -0,0 +1,139 @@
+## Getting Started Guide for Feast Serving Developers
+
+### Overview
+This guide is targeted at developers looking to contribute to Feast Serving:
+- [Building and running Feast Serving locally](#building-and-running-feast-serving-locally)
+
+### Prerequisites:
+
+- [Maven](https://maven.apache.org/install.html) build tool version 3.6.x
+- A Feast feature repo (e.g. https://github.com/feast-dev/feast-demo)
+- A running store instance, e.g. a local Redis instance started with `redis-server`
+
+### Building and running Feast Serving locally:
+From the Feast GitHub root, run:
+
+1. `mvn -f java/pom.xml install -Dmaven.test.skip=true`
+2. Package an executable jar for serving: `mvn -f java/serving/pom.xml package -Dmaven.test.skip=true`
+3. Make a file called `application-override.yaml` that specifies your Feast repo project and registry path:
+   1. Note: if you have a remote registry, you can specify that too (e.g. `gs://...`)
+   ```yaml
+   feast:
+     project: feast_demo
+     registry: /Users/[your username]/GitHub/feast-demo/feature_repo/data/registry.db
+   ```
+   2. An example if you're using Redis with a remote registry:
+   ```yaml
+   feast:
+     project: feast_java_demo
+     registry: gs://[YOUR BUCKET]/demo-repo/registry.db
+     activeStore: online
+     stores:
+     - name: online
+       type: REDIS
+       config:
+         host: localhost
+         port: 6379
+         password: [YOUR PASSWORD]
+   ```
+4. Run the jar-with-dependencies built by Maven (note: the version might vary):
+   ```
+   java \
+     -Xms1g \
+     -Xmx4g \
+     -jar java/serving/target/feast-serving-0.17.1-SNAPSHOT-jar-with-dependencies.jar \
+     classpath:/application.yml,file:./application-override.yaml
+   ```
+5. Now you have a Feast Serving gRPC service running on port 6566 locally!
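+
+### Querying from the Java SDK
+As an alternative to `grpc_cli` (below), you can fetch features programmatically. This is a
+minimal, untested sketch: it assumes the Java SDK's `FeastClient.create(host, port)` factory
+(see `java/sdk`), and the `driver_hourly_stats` features and `feast_demo` project are
+placeholders from the demo repo, so substitute your own:
+```java
+import dev.feast.FeastClient;
+import dev.feast.Row;
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.List;
+
+public class QueryExample {
+  public static void main(String[] args) {
+    // Connects to the Feast Serving gRPC service started in step 4 above
+    FeastClient client = FeastClient.create("localhost", 6566);
+    List<Row> rows =
+        client.getOnlineFeatures(
+            Arrays.asList("driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"),
+            Arrays.asList(Row.create().set("driver_id", 1001).setEntityTimestamp(Instant.now())),
+            "feast_demo");
+    rows.forEach(row -> System.out.println(row.getFields()));
+  }
+}
+```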
+ +### Running test queries +If you have [grpc_cli](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md) installed, you can check that Feast Serving is running +``` +grpc_cli ls localhost:6566 +``` + +An example of fetching features +```bash +grpc_cli call localhost:6566 GetOnlineFeatures ' +features { + val: "driver_hourly_stats:conv_rate" + val: "driver_hourly_stats:acc_rate" +} +entities { + key: "driver_id" + value { + val { + int64_val: 1001 + } + val { + int64_val: 1002 + } + } +} +' +``` +Example output: +``` +connecting to localhost:6566 +metadata { + feature_names { + val: "driver_hourly_stats:conv_rate" + val: "driver_hourly_stats:acc_rate" + } +} +results { + values { + float_val: 0.812357187 + } + values { + float_val: 0.379484832 + } + statuses: PRESENT + statuses: PRESENT + event_timestamps { + seconds: 1631725200 + } + event_timestamps { + seconds: 1631725200 + } +} +results { + values { + float_val: 0.840873241 + } + values { + float_val: 0.151376978 + } + statuses: PRESENT + statuses: PRESENT + event_timestamps { + seconds: 1631725200 + } + event_timestamps { + seconds: 1631725200 + } +} +Rpc succeeded with OK status +``` + +### Debugging Feast Serving +You can debug this like any other Java executable. Swap the java command above with: +``` + java \ + -Xdebug \ + -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=y \ + -Xms1g \ + -Xmx4g \ + -jar java/serving/target/feast-serving-0.17.1-SNAPSHOT-jar-with-dependencies.jar \ + classpath:/application.yml,file:./application-override.yaml + ``` +Now you can attach e.g. a Remote debugger in IntelliJ to port 5005 to debug / make breakpoints. + +### Unit / Integration Tests +Unit & Integration Tests can be used to verify functionality: +```sh +# run unit tests +mvn test -pl serving --also-make +# run integration tests +mvn verify -pl serving --also-make +``` \ No newline at end of file diff --git a/java/serving/pom.xml b/java/serving/pom.xml new file mode 100644 index 0000000000..47a636d7d5 --- /dev/null +++ b/java/serving/pom.xml @@ -0,0 +1,390 @@ + + + + 4.0.0 + + + dev.feast + feast-parent + ${revision} + + + feast-serving + Feast Serving + Feature serving API service + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 11 + + + + + org.jacoco + jacoco-maven-plugin + + + + org.apache.maven.plugins + maven-jar-plugin + 3.2.2 + + + + true + feast.serving.ServingGuiceApplication + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + package + + single + + + + + + jar-with-dependencies + + + + feast.serving.ServingGuiceApplication + + + + + + + + + + + dev.feast + feast-datatypes + ${project.version} + + + + dev.feast + feast-common + ${project.version} + + + + dev.feast + feast-storage-api + ${project.version} + + + + dev.feast + feast-storage-connector-redis + ${project.version} + + + + com.google.inject + guice + 5.0.1 + + + + + org.slf4j + slf4j-api + + + + org.slf4j + slf4j-simple + 1.7.30 + + + + org.apache.logging.log4j + log4j-web + ${log4jVersion} + + + + + io.grpc + grpc-services + ${grpc.version} + + + + io.grpc + grpc-stub + ${grpc.version} + + + io.grpc + grpc-netty-shaded + ${grpc.version} + + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + + + com.google.guava + guava + ${guava.version} + + + + joda-time + joda-time + ${joda.time.version} + + + + io.jaegertracing + jaeger-client + 1.3.2 + + + io.opentracing + opentracing-api + 0.33.0 + + + io.opentracing + opentracing-noop + 0.33.0 + + + io.opentracing.contrib + opentracing-grpc + 0.2.3 + + + + + 
io.prometheus + simpleclient + 0.8.0 + + + + + io.prometheus + simpleclient_hotspot + 0.8.0 + + + + + io.prometheus + simpleclient_servlet + 0.8.0 + + + + com.google.auto.value + auto-value-annotations + ${auto.value.version} + + + + com.google.cloud + google-cloud-storage + 1.118.0 + + + + com.google.cloud + google-cloud-nio + 0.123.10 + test + + + + com.amazonaws + aws-java-sdk-s3 + 1.12.110 + + + + com.adobe.testing + s3mock-testcontainers + 2.2.3 + test + + + + + io.grpc + grpc-testing + ${grpc.version} + + + + org.mockito + mockito-core + ${mockito.version} + test + + + + + org.apache.avro + avro + 1.10.2 + + + + + org.apache.arrow + arrow-java-root + 5.0.0 + pom + + + + + org.apache.arrow + arrow-vector + 5.0.0 + + + + + org.apache.arrow + arrow-memory + 5.0.0 + pom + + + + + org.apache.arrow + arrow-memory-netty + 5.0.0 + runtime + + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${jackson.version} + + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.version} + + + + com.github.kstyrc + embedded-redis + 0.6 + test + + + jakarta.validation + jakarta.validation-api + ${jakarta.validation.api.version} + + + org.testcontainers + testcontainers + 1.16.2 + test + + + org.testcontainers + junit-jupiter + 1.16.2 + test + + + org.awaitility + awaitility + 4.1.1 + test + + + com.squareup.okhttp + okhttp + 2.7.4 + test + + + + + + profile-local + + + !ci + + + + + + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + @ + + false + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + + + + + src/main/resources + true + + + + + + diff --git a/java/serving/src/main/java/feast/serving/ServingGuiceApplication.java b/java/serving/src/main/java/feast/serving/ServingGuiceApplication.java new file mode 100644 index 0000000000..664d6dd4ec --- /dev/null +++ b/java/serving/src/main/java/feast/serving/ServingGuiceApplication.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import feast.serving.config.*; +import io.grpc.Server; +import java.io.IOException; + +public class ServingGuiceApplication { + + public static void main(String[] args) throws InterruptedException, IOException { + if (args.length == 0) { + throw new RuntimeException( + "Path to application configuration file needs to be specified via CLI"); + } + + final Injector i = + Guice.createInjector( + new ServingServiceConfigV2(), + new RegistryConfig(), + new InstrumentationConfig(), + new ServerModule(), + new ApplicationPropertiesModule(args)); + + Server server = i.getInstance(Server.class); + + server.start(); + server.awaitTermination(); + } +} diff --git a/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java b/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java new file mode 100644 index 0000000000..268592d20a --- /dev/null +++ b/java/serving/src/main/java/feast/serving/config/ApplicationProperties.java @@ -0,0 +1,436 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.config; + +// Feast configuration properties that maps Feast configuration from default application.yml file to +// a Java object. +// https://www.baeldung.com/configuration-properties-in-spring-boot +// https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-external-config.html#boot-features-external-config-typesafe-configuration-properties + +import com.fasterxml.jackson.annotation.JsonMerge; +import com.fasterxml.jackson.annotation.OptBoolean; +import feast.common.logging.config.LoggingProperties; +import feast.storage.connectors.redis.retriever.RedisClusterStoreConfig; +import feast.storage.connectors.redis.retriever.RedisStoreConfig; +import io.lettuce.core.ReadFrom; +import java.time.Duration; +import java.util.*; +import javax.annotation.PostConstruct; +import javax.validation.*; +import javax.validation.constraints.NotBlank; +import javax.validation.constraints.NotNull; +import org.slf4j.Logger; + +/** Feast Serving properties. 
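+ * Bound as a singleton by ApplicationPropertiesModule, which uses Jackson to deserialize the
+ * merged YAML configuration files (e.g. classpath:/application.yml plus file-based overrides)
+ * onto this object tree.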
*/ +public class ApplicationProperties { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ApplicationProperties.class); + + public static class FeastProperties { + /* Feast Serving build version */ + @NotBlank private String version = "unknown"; + + public void setRegistry(String registry) { + this.registry = registry; + } + + @NotBlank private String registry; + + public String getRegistry() { + return registry; + } + + @NotBlank private String project; + + public String getProject() { + return project; + } + + public void setProject(final String project) { + this.project = project; + } + + private int registryRefreshInterval; + + public int getRegistryRefreshInterval() { + return registryRefreshInterval; + } + + public void setRegistryRefreshInterval(int registryRefreshInterval) { + this.registryRefreshInterval = registryRefreshInterval; + } + + /** + * Finds and returns the active store + * + * @return Returns the {@link Store} model object + */ + public Store getActiveStore() { + for (Store store : getStores()) { + if (activeStore.equals(store.getName())) { + return store; + } + } + throw new RuntimeException( + String.format("Active store is misconfigured. Could not find store: %s.", activeStore)); + } + + public void setActiveStore(String activeStore) { + this.activeStore = activeStore; + } + + /** Name of the active store configuration (only one store can be active at a time). */ + @NotBlank private String activeStore; + + /** + * Collection of store configurations. The active store is selected by the "activeStore" field. + */ + @JsonMerge(OptBoolean.FALSE) + private List stores = new ArrayList<>(); + + /* Metric tracing properties. */ + private TracingProperties tracing; + + /* Feast Audit Logging properties */ + @NotNull private LoggingProperties logging; + + public void setStores(List stores) { + this.stores = stores; + } + + /** + * Gets Serving store configuration as a list of {@link Store}. + * + * @return List of stores objects + */ + public List getStores() { + return stores; + } + + /** + * Gets Feast Serving build version. + * + * @return the build version + */ + public String getVersion() { + return version; + } + + public void setTracing(TracingProperties tracing) { + this.tracing = tracing; + } + + /** + * Gets tracing properties + * + * @return tracing properties + */ + public TracingProperties getTracing() { + return tracing; + } + + /** + * Gets logging properties + * + * @return logging properties + */ + public LoggingProperties getLogging() { + return logging; + } + + private String gcpProject; + + public String getGcpProject() { + return gcpProject; + } + + public void setGcpProject(String gcpProject) { + this.gcpProject = gcpProject; + } + + public void setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + } + + private String awsRegion; + + public String getAwsRegion() { + return awsRegion; + } + + private String transformationServiceEndpoint; + + public String getTransformationServiceEndpoint() { + return transformationServiceEndpoint; + } + + public void setTransformationServiceEndpoint(String transformationServiceEndpoint) { + this.transformationServiceEndpoint = transformationServiceEndpoint; + } + } + + private FeastProperties feast; + + public void setFeast(FeastProperties feast) { + this.feast = feast; + } + + public FeastProperties getFeast() { + return feast; + } + + /** Store configuration class for database that this Feast Serving uses. 
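+ * The "type" field selects a StoreType (REDIS or REDIS_CLUSTER), while the free-form "config"
+ * map carries store-specific options such as host, port, ssl and password.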
*/ + public static class Store { + + private String name; + + private String type; + + private Map config = new HashMap<>(); + + // default construct for deserialization + public Store() {} + + public Store(String name, String type, Map config) { + this.name = name; + this.type = type; + this.config = config; + } + + /** + * Gets name of this store. This is unique to this specific instance. + * + * @return the name of the store + */ + public String getName() { + return name; + } + + /** + * Sets the name of this store. + * + * @param name the name of the store + */ + public void setName(String name) { + this.name = name; + } + + /** + * Gets the store type. Example are REDIS, REDIS_CLUSTER, BIGTABLE or CASSANDRA + * + * @return the store type as a String. + */ + public StoreType getType() { + return StoreType.valueOf(this.type); + } + + public void setType(String type) { + this.type = type; + } + + /** + * Gets the configuration to this specific store. This is a map of strings. These options are + * unique to the store. Please see protos/feast/core/Store.proto for the store specific + * configuration options + * + * @return Returns the store specific configuration + */ + public RedisClusterStoreConfig getRedisClusterConfig() { + String read_from; + if (!this.config.containsKey("read_from") || this.config.get("read_from") == null) { + log.info("'read_from' not defined in Redis cluster config, so setting to UPSTREAM"); + read_from = ReadFrom.UPSTREAM.toString(); + } else { + read_from = this.config.get("read_from"); + } + + if (!this.config.containsKey("timeout") || this.config.get("timeout") == null) { + throw new IllegalArgumentException( + "Redis cluster config does not have 'timeout' specified"); + } + + Boolean ssl = null; + if (!this.config.containsKey("ssl") || this.config.get("ssl") == null) { + log.info("'ssl' not defined in Redis cluster config, so setting to false"); + ssl = false; + } else { + ssl = Boolean.parseBoolean(this.config.get("ssl")); + } + Duration timeout = Duration.parse(this.config.get("timeout")); + return new RedisClusterStoreConfig( + this.config.get("connection_string"), + ReadFrom.valueOf(read_from), + timeout, + ssl, + this.config.getOrDefault("password", "")); + } + + public RedisStoreConfig getRedisConfig() { + return new RedisStoreConfig( + this.config.get("host"), + Integer.valueOf(this.config.get("port")), + Boolean.valueOf(this.config.getOrDefault("ssl", "false")), + this.config.getOrDefault("password", "")); + } + + public void setConfig(Map config) { + this.config = config; + } + } + + public static class Server { + private int port; + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + } + + public static class GrpcServer { + private Server server; + + public Server getServer() { + return server; + } + + public void setServer(Server server) { + this.server = server; + } + } + + public static class RestServer { + private Server server; + + public Server getServer() { + return server; + } + + public void setServer(Server server) { + this.server = server; + } + } + + private GrpcServer grpc; + private RestServer rest; + + public GrpcServer getGrpc() { + return grpc; + } + + public void setGrpc(GrpcServer grpc) { + this.grpc = grpc; + } + + public RestServer getRest() { + return rest; + } + + public void setRest(RestServer rest) { + this.rest = rest; + } + + public enum StoreType { + REDIS, + REDIS_CLUSTER; + } + + /** Trace metric collection properties */ + public static class TracingProperties { + + 
/** Tracing enabled/disabled */ + private boolean enabled; + + /** Name of tracer to use (only "jaeger") */ + private String tracerName; + + /** Service name uniquely identifies this Feast Serving deployment */ + private String serviceName; + + /** + * Is tracing enabled + * + * @return boolean flag + */ + public boolean isEnabled() { + return enabled; + } + + /** + * Sets tracing enabled or disabled. + * + * @param enabled flag + */ + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + /** + * Gets tracer name ('jaeger') + * + * @return the tracer name + */ + public String getTracerName() { + return tracerName; + } + + /** + * Sets tracer name. + * + * @param tracerName the tracer name + */ + public void setTracerName(String tracerName) { + this.tracerName = tracerName; + } + + /** + * Gets the service name. The service name uniquely identifies this Feast serving instance. + * + * @return the service name + */ + public String getServiceName() { + return serviceName; + } + + /** + * Sets service name. + * + * @param serviceName the service name + */ + public void setServiceName(String serviceName) { + this.serviceName = serviceName; + } + } + + /** + * Validates all FeastProperties. This method runs after properties have been initialized and + * individually and conditionally validates each class. + */ + @PostConstruct + public void validate() { + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + Validator validator = factory.getValidator(); + + // Validate root fields in FeastProperties + Set> violations = validator.validate(this); + if (!violations.isEmpty()) { + throw new ConstraintViolationException(violations); + } + } +} diff --git a/java/serving/src/main/java/feast/serving/config/ApplicationPropertiesModule.java b/java/serving/src/main/java/feast/serving/config/ApplicationPropertiesModule.java new file mode 100644 index 0000000000..07183fc710 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/config/ApplicationPropertiesModule.java @@ -0,0 +1,75 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.google.common.io.Resources; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import com.google.inject.Singleton; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class ApplicationPropertiesModule extends AbstractModule { + private final String[] args; + + public ApplicationPropertiesModule(String[] args) { + this.args = args; + } + + @Provides + @Singleton + public ApplicationProperties provideApplicationProperties() throws IOException { + ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); + mapper.findAndRegisterModules(); + mapper.setDefaultMergeable(Boolean.TRUE); + + ApplicationProperties properties = new ApplicationProperties(); + ObjectReader objectReader = mapper.readerForUpdating(properties); + + String[] filePaths = this.args[0].split(","); + for (String filePath : filePaths) { + objectReader.readValue(readPropertiesFile(filePath)); + } + + return properties; + } + + /** + * Read file path in spring compatible format, eg classpath:/application.yml or + * file:/path/application.yml + */ + private byte[] readPropertiesFile(String filePath) throws IOException { + if (filePath.startsWith("classpath:")) { + filePath = filePath.substring("classpath:".length()); + if (filePath.startsWith("/")) { + filePath = filePath.substring(1); + } + + return Resources.toByteArray(Resources.getResource(filePath)); + } + + if (filePath.startsWith("file")) { + filePath = filePath.substring("file:".length()); + } + + return Files.readAllBytes(Path.of(filePath)); + } +} diff --git a/java/serving/src/main/java/feast/serving/config/InstrumentationConfig.java b/java/serving/src/main/java/feast/serving/config/InstrumentationConfig.java new file mode 100644 index 0000000000..7f8590bb84 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/config/InstrumentationConfig.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.config; + +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import io.opentracing.Tracer; +import io.opentracing.contrib.grpc.TracingServerInterceptor; +import io.opentracing.noop.NoopTracerFactory; + +public class InstrumentationConfig extends AbstractModule { + + @Provides + public Tracer tracer(ApplicationProperties applicationProperties) { + if (!applicationProperties.getFeast().getTracing().isEnabled()) { + return NoopTracerFactory.create(); + } + + if (!applicationProperties.getFeast().getTracing().getTracerName().equalsIgnoreCase("jaeger")) { + throw new IllegalArgumentException("Only 'jaeger' tracer is supported for now."); + } + + return io.jaegertracing.Configuration.fromEnv( + applicationProperties.getFeast().getTracing().getServiceName()) + .getTracer(); + } + + @Provides + public TracingServerInterceptor tracingInterceptor(Tracer tracer) { + return TracingServerInterceptor.newBuilder().withTracer(tracer).build(); + } +} diff --git a/java/serving/src/main/java/feast/serving/config/RegistryConfig.java b/java/serving/src/main/java/feast/serving/config/RegistryConfig.java new file mode 100644 index 0000000000..3e7cbe3f1f --- /dev/null +++ b/java/serving/src/main/java/feast/serving/config/RegistryConfig.java @@ -0,0 +1,74 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package feast.serving.config;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.google.cloud.storage.Storage;
+import com.google.cloud.storage.StorageOptions;
+import com.google.inject.AbstractModule;
+import com.google.inject.Provider;
+import com.google.inject.Provides;
+import feast.serving.registry.*;
+import java.net.URI;
+import java.util.Optional;
+
+public class RegistryConfig extends AbstractModule {
+  @Provides
+  Storage googleStorage(ApplicationProperties applicationProperties) {
+    return StorageOptions.newBuilder()
+        .setProjectId(applicationProperties.getFeast().getGcpProject())
+        .build()
+        .getService();
+  }
+
+  @Provides
+  public AmazonS3 awsStorage(ApplicationProperties applicationProperties) {
+    return AmazonS3ClientBuilder.standard()
+        .withRegion(applicationProperties.getFeast().getAwsRegion())
+        .build();
+  }
+
+  @Provides
+  RegistryFile registryFile(
+      ApplicationProperties applicationProperties,
+      Provider<Storage> storageProvider,
+      Provider<AmazonS3> amazonS3Provider) {
+
+    String registryPath = applicationProperties.getFeast().getRegistry();
+    Optional<String> scheme = Optional.ofNullable(URI.create(registryPath).getScheme());
+
+    switch (scheme.orElse("")) {
+      case "gs":
+        return new GSRegistryFile(storageProvider.get(), registryPath);
+      case "s3":
+        return new S3RegistryFile(amazonS3Provider.get(), registryPath);
+      case "":
+      case "file":
+        return new LocalRegistryFile(registryPath);
+      default:
+        throw new RuntimeException(
+            String.format("Registry storage %s is unsupported", scheme.get()));
+    }
+  }
+
+  @Provides
+  RegistryRepository registryRepository(
+      RegistryFile registryFile, ApplicationProperties applicationProperties) {
+    return new RegistryRepository(
+        registryFile, applicationProperties.getFeast().getRegistryRefreshInterval());
+  }
+}
diff --git a/java/serving/src/main/java/feast/serving/config/ServerModule.java b/java/serving/src/main/java/feast/serving/config/ServerModule.java
new file mode 100644
index 0000000000..5428306f2b
--- /dev/null
+++ b/java/serving/src/main/java/feast/serving/config/ServerModule.java
@@ -0,0 +1,57 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2021 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package feast.serving.config; + +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import feast.serving.controller.HealthServiceController; +import feast.serving.grpc.OnlineServingGrpcServiceV2; +import feast.serving.service.ServingServiceV2; +import io.grpc.Server; +import io.grpc.ServerBuilder; +import io.grpc.health.v1.HealthGrpc; +import io.grpc.protobuf.services.ProtoReflectionService; +import io.opentracing.contrib.grpc.TracingServerInterceptor; + +public class ServerModule extends AbstractModule { + + @Override + protected void configure() { + bind(OnlineServingGrpcServiceV2.class); + } + + @Provides + public Server provideGrpcServer( + ApplicationProperties applicationProperties, + OnlineServingGrpcServiceV2 onlineServingGrpcServiceV2, + TracingServerInterceptor tracingServerInterceptor, + HealthGrpc.HealthImplBase healthImplBase) { + ServerBuilder serverBuilder = + ServerBuilder.forPort(applicationProperties.getGrpc().getServer().getPort()); + serverBuilder + .addService(ProtoReflectionService.newInstance()) + .addService(tracingServerInterceptor.intercept(onlineServingGrpcServiceV2)) + .addService(healthImplBase); + + return serverBuilder.build(); + } + + @Provides + public HealthGrpc.HealthImplBase healthService(ServingServiceV2 servingServiceV2) { + return new HealthServiceController(servingServiceV2); + } +} diff --git a/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java b/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java new file mode 100644 index 0000000000..4ea0692ccd --- /dev/null +++ b/java/serving/src/main/java/feast/serving/config/ServingServiceConfigV2.java @@ -0,0 +1,86 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.config; + +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import feast.serving.registry.*; +import feast.serving.service.OnlineServingServiceV2; +import feast.serving.service.OnlineTransformationService; +import feast.serving.service.ServingServiceV2; +import feast.storage.api.retriever.OnlineRetrieverV2; +import feast.storage.connectors.redis.retriever.*; +import io.opentracing.Tracer; +import org.slf4j.Logger; + +public class ServingServiceConfigV2 extends AbstractModule { + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ServingServiceConfigV2.class); + + @Provides + public ServingServiceV2 registryBasedServingServiceV2( + ApplicationProperties applicationProperties, + RegistryRepository registryRepository, + Tracer tracer) { + final ServingServiceV2 servingService; + final ApplicationProperties.Store store = applicationProperties.getFeast().getActiveStore(); + + OnlineRetrieverV2 retrieverV2; + // TODO: Support more store types, and potentially use a plugin model here. 
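+    // Dispatch on the configured store type: both branches build an OnlineRetriever over a
+    // Redis client adapter and the same entity key serializer; only the client differs.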
+ switch (store.getType()) { + case REDIS_CLUSTER: + RedisClientAdapter redisClusterClient = + RedisClusterClient.create(store.getRedisClusterConfig()); + retrieverV2 = + new OnlineRetriever( + applicationProperties.getFeast().getProject(), + redisClusterClient, + new EntityKeySerializerV2()); + break; + case REDIS: + RedisClientAdapter redisClient = RedisClient.create(store.getRedisConfig()); + log.info("Created EntityKeySerializerV2"); + retrieverV2 = + new OnlineRetriever( + applicationProperties.getFeast().getProject(), + redisClient, + new EntityKeySerializerV2()); + break; + default: + throw new RuntimeException( + String.format( + "Unable to identify online store type: %s for Registry Backed Serving Service", + store.getType())); + } + + log.info("Working Directory = " + System.getProperty("user.dir")); + + final OnlineTransformationService onlineTransformationService = + new OnlineTransformationService( + applicationProperties.getFeast().getTransformationServiceEndpoint(), + registryRepository); + + servingService = + new OnlineServingServiceV2( + retrieverV2, + tracer, + registryRepository, + onlineTransformationService, + applicationProperties.getFeast().getProject()); + + return servingService; + } +} diff --git a/java/serving/src/main/java/feast/serving/controller/HealthServiceController.java b/java/serving/src/main/java/feast/serving/controller/HealthServiceController.java new file mode 100644 index 0000000000..2f98ae032f --- /dev/null +++ b/java/serving/src/main/java/feast/serving/controller/HealthServiceController.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.controller; + +import com.google.inject.Inject; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.serving.service.ServingServiceV2; +import io.grpc.health.v1.HealthGrpc.HealthImplBase; +import io.grpc.health.v1.HealthProto.HealthCheckRequest; +import io.grpc.health.v1.HealthProto.HealthCheckResponse; +import io.grpc.health.v1.HealthProto.ServingStatus; +import io.grpc.stub.StreamObserver; + +// Reference: https://github.com/grpc/grpc/blob/master/doc/health-checking.md +public class HealthServiceController extends HealthImplBase { + private final ServingServiceV2 servingService; + + @Inject + public HealthServiceController(final ServingServiceV2 servingService) { + this.servingService = servingService; + } + + @Override + public void check( + HealthCheckRequest request, StreamObserver responseObserver) { + // TODO: Implement proper logic to determine if ServingServiceV2 is healthy e.g. + // if it's online service check that it the service can retrieve dummy/random + // feature table. + // Implement similarly for batch service. 
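+    // Until then, a successful getFeastServingInfo call below serves as the liveness signal.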
+ + try { + servingService.getFeastServingInfo(GetFeastServingInfoRequest.getDefaultInstance()); + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(ServingStatus.SERVING).build()); + } catch (Exception e) { + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(ServingStatus.NOT_SERVING).build()); + } + responseObserver.onCompleted(); + } +} diff --git a/java/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java b/java/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java new file mode 100644 index 0000000000..fbcba969fa --- /dev/null +++ b/java/serving/src/main/java/feast/serving/exception/SpecRetrievalException.java @@ -0,0 +1,32 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.exception; + +/** Application-specific exception for any failure of retrieving feature/entity/storage spec. */ +public class SpecRetrievalException extends RuntimeException { + public SpecRetrievalException() { + super(); + } + + public SpecRetrievalException(String message) { + super(message); + } + + public SpecRetrievalException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/java/serving/src/main/java/feast/serving/grpc/OnlineServingGrpcServiceV2.java b/java/serving/src/main/java/feast/serving/grpc/OnlineServingGrpcServiceV2.java new file mode 100644 index 0000000000..fe024404f3 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/grpc/OnlineServingGrpcServiceV2.java @@ -0,0 +1,64 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.grpc; + +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingServiceGrpc; +import feast.serving.service.ServingServiceV2; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; +import javax.inject.Inject; +import org.slf4j.Logger; + +public class OnlineServingGrpcServiceV2 extends ServingServiceGrpc.ServingServiceImplBase { + private final ServingServiceV2 servingServiceV2; + private static final Logger log = + org.slf4j.LoggerFactory.getLogger(OnlineServingGrpcServiceV2.class); + + @Inject + OnlineServingGrpcServiceV2(ServingServiceV2 servingServiceV2) { + this.servingServiceV2 = servingServiceV2; + } + + @Override + public void getFeastServingInfo( + ServingAPIProto.GetFeastServingInfoRequest request, + StreamObserver responseObserver) { + try { + responseObserver.onNext(this.servingServiceV2.getFeastServingInfo(request)); + responseObserver.onCompleted(); + } catch (RuntimeException e) { + log.warn("Failed to get Serving Info", e); + responseObserver.onError( + Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException()); + } + } + + @Override + public void getOnlineFeatures( + ServingAPIProto.GetOnlineFeaturesRequest request, + StreamObserver responseObserver) { + try { + responseObserver.onNext(this.servingServiceV2.getOnlineFeatures(request)); + responseObserver.onCompleted(); + } catch (RuntimeException e) { + log.warn("Failed to get Online Features", e); + responseObserver.onError( + Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException()); + } + } +} diff --git a/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringContext.java b/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringContext.java new file mode 100644 index 0000000000..48d8d76a91 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringContext.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.interceptors; + +import java.util.Optional; + +public class GrpcMonitoringContext { + private static GrpcMonitoringContext INSTANCE; + + final ThreadLocal project = new ThreadLocal(); + + private GrpcMonitoringContext() {} + + public static GrpcMonitoringContext getInstance() { + if (INSTANCE == null) { + INSTANCE = new GrpcMonitoringContext(); + } + + return INSTANCE; + } + + public void setProject(String name) { + this.project.set(name); + } + + public Optional getProject() { + return Optional.ofNullable(this.project.get()); + } + + public void clearProject() { + this.project.set(null); + } +} diff --git a/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringInterceptor.java b/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringInterceptor.java new file mode 100644 index 0000000000..735f8c556d --- /dev/null +++ b/java/serving/src/main/java/feast/serving/interceptors/GrpcMonitoringInterceptor.java @@ -0,0 +1,60 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.interceptors; + +import feast.serving.util.Metrics; +import io.grpc.ForwardingServerCall.SimpleForwardingServerCall; +import io.grpc.Metadata; +import io.grpc.ServerCall; +import io.grpc.ServerCall.Listener; +import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; +import io.grpc.Status; +import java.util.Optional; + +/** + * GrpcMonitoringInterceptor intercepts GRPC calls to provide request latency histogram metrics in + * the Prometheus client. 
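+ * Latency is measured from the moment the call is intercepted until it is closed, and is
+ * recorded together with a per-method, per-status request counter via the Metrics class.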
+ */ +public class GrpcMonitoringInterceptor implements ServerInterceptor { + + @Override + public Listener interceptCall( + ServerCall call, Metadata headers, ServerCallHandler next) { + + long startCallMillis = System.currentTimeMillis(); + String fullMethodName = call.getMethodDescriptor().getFullMethodName(); + String methodName = fullMethodName.substring(fullMethodName.indexOf("/") + 1); + + GrpcMonitoringContext.getInstance().clearProject(); + + return next.startCall( + new SimpleForwardingServerCall(call) { + @Override + public void close(Status status, Metadata trailers) { + Optional projectName = GrpcMonitoringContext.getInstance().getProject(); + + Metrics.requestLatency + .labels(methodName, projectName.orElse("")) + .observe((System.currentTimeMillis() - startCallMillis) / 1000f); + Metrics.grpcRequestCount.labels(methodName, status.getCode().name()).inc(); + super.close(status, trailers); + } + }, + headers); + } +} diff --git a/java/serving/src/main/java/feast/serving/registry/GSRegistryFile.java b/java/serving/src/main/java/feast/serving/registry/GSRegistryFile.java new file mode 100644 index 0000000000..7772f695bd --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/GSRegistryFile.java @@ -0,0 +1,62 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.registry; + +import com.google.cloud.storage.*; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.RegistryProto; +import java.util.Optional; + +public class GSRegistryFile implements RegistryFile { + private Blob blob; + + public GSRegistryFile(Storage storage, String url) { + blob = storage.get(BlobId.fromGsUtilUri(url)); + if (blob == null) { + throw new RuntimeException(String.format("Registry file %s was not found", url)); + } + } + + public RegistryProto.Registry getContent() { + try { + return RegistryProto.Registry.parseFrom(blob.getContent()); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException( + String.format( + "Couldn't read remote registry: %s. Error: %s", + blob.getBlobId().toGsUtilUri(), e.getMessage())); + } + } + + public Optional getContentIfModified() { + try { + this.blob = blob.reload(Blob.BlobSourceOption.generationNotMatch()); + } catch (StorageException e) { + if (e.getCode() == 304) { + // Content not modified + return Optional.empty(); + } else { + throw new RuntimeException( + String.format( + "Couldn't read remote registry: %s. 
Error: %s", + blob.getBlobId().toGsUtilUri(), e.getMessage())); + } + } + + return Optional.of(this.getContent()); + } +} diff --git a/java/serving/src/main/java/feast/serving/registry/LocalRegistryFile.java b/java/serving/src/main/java/feast/serving/registry/LocalRegistryFile.java new file mode 100644 index 0000000000..b0d6b10bfc --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/LocalRegistryFile.java @@ -0,0 +1,51 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.registry; + +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.RegistryProto; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Optional; + +public class LocalRegistryFile implements RegistryFile { + private RegistryProto.Registry cachedRegistry; + + public LocalRegistryFile(String path) { + try { + cachedRegistry = RegistryProto.Registry.parseFrom(Files.readAllBytes(Paths.get(path))); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException( + String.format( + "Couldn't read local registry: %s. Protobuf is invalid: %s", path, e.getMessage())); + } catch (IOException e) { + throw new RuntimeException( + String.format("Couldn't read local registry file: %s. Error: %s", path, e.getMessage())); + } + } + + @Override + public RegistryProto.Registry getContent() { + return this.cachedRegistry; + } + + @Override + public Optional getContentIfModified() { + return Optional.empty(); + } +} diff --git a/java/serving/src/main/java/feast/serving/registry/Registry.java b/java/serving/src/main/java/feast/serving/registry/Registry.java new file mode 100644 index 0000000000..37fae3d8dc --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/Registry.java @@ -0,0 +1,115 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.registry; + +import feast.proto.core.*; +import feast.proto.serving.ServingAPIProto; +import feast.serving.exception.SpecRetrievalException; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class Registry { + private final RegistryProto.Registry registry; + private Map featureViewNameToSpec; + private Map + onDemandFeatureViewNameToSpec; + private Map featureServiceNameToSpec; + + Registry(RegistryProto.Registry registry) { + this.registry = registry; + List featureViewSpecs = + registry.getFeatureViewsList().stream() + .map(fv -> fv.getSpec()) + .collect(Collectors.toList()); + this.featureViewNameToSpec = + featureViewSpecs.stream() + .collect( + Collectors.toMap(FeatureViewProto.FeatureViewSpec::getName, Function.identity())); + List onDemandFeatureViewSpecs = + registry.getOnDemandFeatureViewsList().stream() + .map(odfv -> odfv.getSpec()) + .collect(Collectors.toList()); + this.onDemandFeatureViewNameToSpec = + onDemandFeatureViewSpecs.stream() + .collect( + Collectors.toMap( + OnDemandFeatureViewProto.OnDemandFeatureViewSpec::getName, + Function.identity())); + this.featureServiceNameToSpec = + registry.getFeatureServicesList().stream() + .map(fs -> fs.getSpec()) + .collect( + Collectors.toMap( + FeatureServiceProto.FeatureServiceSpec::getName, Function.identity())); + } + + public RegistryProto.Registry getRegistry() { + return this.registry; + } + + public FeatureViewProto.FeatureViewSpec getFeatureViewSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + String featureViewName = featureReference.getFeatureViewName(); + if (featureViewNameToSpec.containsKey(featureViewName)) { + return featureViewNameToSpec.get(featureViewName); + } + throw new SpecRetrievalException( + String.format("Unable to find feature view with name: %s", featureViewName)); + } + + public FeatureProto.FeatureSpecV2 getFeatureSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + final FeatureViewProto.FeatureViewSpec spec = this.getFeatureViewSpec(featureReference); + for (final FeatureProto.FeatureSpecV2 featureSpec : spec.getFeaturesList()) { + if (featureSpec.getName().equals(featureReference.getFeatureName())) { + return featureSpec; + } + } + + throw new SpecRetrievalException( + String.format( + "Unable to find feature with name: %s in feature view: %s", + featureReference.getFeatureName(), featureReference.getFeatureViewName())); + } + + public OnDemandFeatureViewProto.OnDemandFeatureViewSpec getOnDemandFeatureViewSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + String onDemandFeatureViewName = featureReference.getFeatureViewName(); + if (onDemandFeatureViewNameToSpec.containsKey(onDemandFeatureViewName)) { + return onDemandFeatureViewNameToSpec.get(onDemandFeatureViewName); + } + throw new SpecRetrievalException( + String.format( + "Unable to find on demand feature view with name: %s", onDemandFeatureViewName)); + } + + public boolean isOnDemandFeatureReference(ServingAPIProto.FeatureReferenceV2 featureReference) { + String onDemandFeatureViewName = featureReference.getFeatureViewName(); + return onDemandFeatureViewNameToSpec.containsKey(onDemandFeatureViewName); + } + + public FeatureServiceProto.FeatureServiceSpec getFeatureServiceSpec(String name) { + FeatureServiceProto.FeatureServiceSpec spec = featureServiceNameToSpec.get(name); + if (spec == null) { + throw new SpecRetrievalException( + String.format("Unable to find feature service with name: %s", 
name)); + } + return spec; + } +} diff --git a/java/serving/src/main/java/feast/serving/registry/RegistryFile.java b/java/serving/src/main/java/feast/serving/registry/RegistryFile.java new file mode 100644 index 0000000000..88a7535291 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/RegistryFile.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.registry; + +import feast.proto.core.RegistryProto; +import java.util.Optional; + +public interface RegistryFile { + RegistryProto.Registry getContent(); + + Optional getContentIfModified(); +} diff --git a/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java b/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java new file mode 100644 index 0000000000..369493ee0f --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/RegistryRepository.java @@ -0,0 +1,105 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.registry; + +import com.google.protobuf.Duration; +import feast.proto.core.FeatureProto; +import feast.proto.core.FeatureServiceProto; +import feast.proto.core.FeatureViewProto; +import feast.proto.core.OnDemandFeatureViewProto; +import feast.proto.core.RegistryProto; +import feast.proto.serving.ServingAPIProto; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +/* + * + * Read-only access to Feast registry. + * Registry is obtained by calling specific storage implementation: eg, Local, GCS, AWS S3. + * All data is being then cached. + * It is possible to refresh registry (reload from storage) on configured interval. 
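+ * Refresh runs on a single-threaded daemon scheduler, and a new Registry snapshot is swapped
+ * in only when the backing RegistryFile reports modified content.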
+ * + * */ +public class RegistryRepository { + private Registry registry; + private RegistryFile registryFile; + + public RegistryRepository(RegistryFile registryFile, int refreshIntervalSecs) { + this.registryFile = registryFile; + this.registry = new Registry(this.registryFile.getContent()); + + if (refreshIntervalSecs > 0) { + setupPeriodicalRefresh(refreshIntervalSecs); + } + } + + public RegistryRepository(Registry registry) { + this.registry = registry; + } + + private void setupPeriodicalRefresh(int seconds) { + Executors.newSingleThreadScheduledExecutor( + r -> { + Thread t = Executors.defaultThreadFactory().newThread(r); + t.setDaemon(true); + return t; + }) + .scheduleWithFixedDelay(this::refresh, seconds, seconds, TimeUnit.SECONDS); + } + + private void refresh() { + Optional registryProto = this.registryFile.getContentIfModified(); + if (registryProto.isEmpty()) { + return; + } + + this.registry = new Registry(registryProto.get()); + } + + public FeatureViewProto.FeatureViewSpec getFeatureViewSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + return this.registry.getFeatureViewSpec(featureReference); + } + + public FeatureProto.FeatureSpecV2 getFeatureSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + return this.registry.getFeatureSpec(featureReference); + } + + public OnDemandFeatureViewProto.OnDemandFeatureViewSpec getOnDemandFeatureViewSpec( + ServingAPIProto.FeatureReferenceV2 featureReference) { + return this.registry.getOnDemandFeatureViewSpec(featureReference); + } + + public boolean isOnDemandFeatureReference(ServingAPIProto.FeatureReferenceV2 featureReference) { + return this.registry.isOnDemandFeatureReference(featureReference); + } + + public FeatureServiceProto.FeatureServiceSpec getFeatureServiceSpec(String name) { + return this.registry.getFeatureServiceSpec(name); + } + + public Duration getMaxAge(ServingAPIProto.FeatureReferenceV2 featureReference) { + return getFeatureViewSpec(featureReference).getTtl(); + } + + public List getEntitiesList(ServingAPIProto.FeatureReferenceV2 featureReference) { + return getFeatureViewSpec(featureReference).getEntitiesList(); + } +} diff --git a/java/serving/src/main/java/feast/serving/registry/S3RegistryFile.java b/java/serving/src/main/java/feast/serving/registry/S3RegistryFile.java new file mode 100644 index 0000000000..4b122a5de0 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/registry/S3RegistryFile.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.registry; + +import com.amazonaws.AmazonServiceException; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectInputStream; +import feast.proto.core.RegistryProto; +import java.io.IOException; +import java.util.Optional; + +public class S3RegistryFile implements RegistryFile { + private final AmazonS3 s3Client; + private S3Object s3Object; + + public S3RegistryFile(AmazonS3 s3Client, String url) { + this.s3Client = s3Client; + + String[] split = url.replace("s3://", "").split("/"); + String objectPath = String.join("/", java.util.Arrays.copyOfRange(split, 1, split.length)); + this.s3Object = this.s3Client.getObject(split[0], objectPath); + } + + @Override + public RegistryProto.Registry getContent() { + S3ObjectInputStream is = this.s3Object.getObjectContent(); + + try { + return RegistryProto.Registry.parseFrom(is); + } catch (IOException e) { + throw new RuntimeException( + String.format( + "Couldn't read remote registry file: %s. Error: %s", + String.format("s3://%s/%s", this.s3Object.getBucketName(), this.s3Object.getKey()), + e.getMessage())); + } finally { + try { + is.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + + @Override + public Optional getContentIfModified() { + GetObjectRequest request = + new GetObjectRequest(this.s3Object.getBucketName(), this.s3Object.getKey()) + .withNonmatchingETagConstraint(this.s3Object.getObjectMetadata().getETag()); + + S3Object updatedS3Object; + try { + updatedS3Object = this.s3Client.getObject(request); + } catch (AmazonServiceException e) { + e.printStackTrace(); + return Optional.empty(); + } + + if (updatedS3Object == null) { + return Optional.empty(); + } + + this.s3Object = updatedS3Object; + return Optional.of(this.getContent()); + } +} diff --git a/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java b/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java new file mode 100644 index 0000000000..12e8a5b158 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/service/OnlineServingServiceV2.java @@ -0,0 +1,412 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.service; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import feast.common.models.Feature; +import feast.proto.core.FeatureServiceProto; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.FeatureReferenceV2; +import feast.proto.serving.ServingAPIProto.FieldStatus; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesRequest; +import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesResponse; +import feast.proto.serving.TransformationServiceAPIProto.ValueType; +import feast.proto.types.ValueProto; +import feast.serving.registry.RegistryRepository; +import feast.serving.util.Metrics; +import feast.storage.api.retriever.OnlineRetrieverV2; +import io.grpc.Status; +import io.opentracing.Span; +import io.opentracing.Tracer; +import java.util.*; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; + +public class OnlineServingServiceV2 implements ServingServiceV2 { + + private static final Logger log = org.slf4j.LoggerFactory.getLogger(OnlineServingServiceV2.class); + private final Tracer tracer; + private final OnlineRetrieverV2 retriever; + private final RegistryRepository registryRepository; + private final OnlineTransformationService onlineTransformationService; + private final String project; + + public OnlineServingServiceV2( + OnlineRetrieverV2 retriever, + Tracer tracer, + RegistryRepository registryRepository, + OnlineTransformationService onlineTransformationService, + String project) { + this.retriever = retriever; + this.tracer = tracer; + this.registryRepository = registryRepository; + this.onlineTransformationService = onlineTransformationService; + this.project = project; + } + + /** {@inheritDoc} */ + @Override + public GetFeastServingInfoResponse getFeastServingInfo( + GetFeastServingInfoRequest getFeastServingInfoRequest) { + return GetFeastServingInfoResponse.getDefaultInstance(); + } + + @Override + public ServingAPIProto.GetOnlineFeaturesResponse getOnlineFeatures( + ServingAPIProto.GetOnlineFeaturesRequest request) { + // Split all feature references into non-ODFV (e.g. batch and stream) references and ODFV. + List allFeatureReferences = getFeaturesList(request); + List retrievedFeatureReferences = + allFeatureReferences.stream() + .filter(r -> !this.registryRepository.isOnDemandFeatureReference(r)) + .collect(Collectors.toList()); + int userRequestedFeaturesSize = retrievedFeatureReferences.size(); + + List onDemandFeatureReferences = + allFeatureReferences.stream() + .filter(r -> this.registryRepository.isOnDemandFeatureReference(r)) + .collect(Collectors.toList()); + + // ToDo (pyalex): refactor transformation service to delete unused left part of the returned + // Pair from extractRequestDataFeatureNamesAndOnDemandFeatureSources. + // Currently, we can retrieve context variables directly from GetOnlineFeaturesRequest. + List onDemandFeatureSources = + this.onlineTransformationService.extractOnDemandFeaturesDependencies( + onDemandFeatureReferences); + + // Add on demand feature sources to list of feature references to retrieve. 
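+ // Sources that the user also requested directly are already present and are skipped,
+ // so each underlying feature is retrieved from the online store only once.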
+ for (FeatureReferenceV2 onDemandFeatureSource : onDemandFeatureSources) { + if (!retrievedFeatureReferences.contains(onDemandFeatureSource)) { + retrievedFeatureReferences.add(onDemandFeatureSource); + } + } + + List> entityRows = getEntityRows(request); + + List entityNames; + if (retrievedFeatureReferences.size() > 0) { + entityNames = this.registryRepository.getEntitiesList(retrievedFeatureReferences.get(0)); + } else { + throw new RuntimeException("Requested features list must not be empty"); + } + + Span storageRetrievalSpan = tracer.buildSpan("storageRetrieval").start(); + if (storageRetrievalSpan != null) { + storageRetrievalSpan.setTag("entities", entityRows.size()); + storageRetrievalSpan.setTag("features", retrievedFeatureReferences.size()); + } + List> features = + retriever.getOnlineFeatures(entityRows, retrievedFeatureReferences, entityNames); + + if (storageRetrievalSpan != null) { + storageRetrievalSpan.finish(); + } + if (features.size() != entityRows.size()) { + throw Status.INTERNAL + .withDescription( + "The no. of FeatureRow obtained from OnlineRetriever" + + "does not match no. of entityRow passed.") + .asRuntimeException(); + } + + Span postProcessingSpan = tracer.buildSpan("postProcessing").start(); + + ServingAPIProto.GetOnlineFeaturesResponse.Builder responseBuilder = + ServingAPIProto.GetOnlineFeaturesResponse.newBuilder(); + + Timestamp now = Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000).build(); + Timestamp nullTimestamp = Timestamp.newBuilder().build(); + ValueProto.Value nullValue = ValueProto.Value.newBuilder().build(); + + for (int featureIdx = 0; featureIdx < userRequestedFeaturesSize; featureIdx++) { + FeatureReferenceV2 featureReference = retrievedFeatureReferences.get(featureIdx); + + ValueProto.ValueType.Enum valueType = + this.registryRepository.getFeatureSpec(featureReference).getValueType(); + + Duration maxAge = this.registryRepository.getMaxAge(featureReference); + + ServingAPIProto.GetOnlineFeaturesResponse.FeatureVector.Builder vectorBuilder = + responseBuilder.addResultsBuilder(); + + for (int rowIdx = 0; rowIdx < features.size(); rowIdx++) { + feast.storage.api.retriever.Feature feature = features.get(rowIdx).get(featureIdx); + if (feature == null) { + vectorBuilder.addValues(nullValue); + vectorBuilder.addStatuses(FieldStatus.NOT_FOUND); + vectorBuilder.addEventTimestamps(nullTimestamp); + continue; + } + + ValueProto.Value featureValue = feature.getFeatureValue(valueType); + if (featureValue == null) { + vectorBuilder.addValues(nullValue); + vectorBuilder.addStatuses(FieldStatus.NOT_FOUND); + vectorBuilder.addEventTimestamps(nullTimestamp); + continue; + } + + vectorBuilder.addValues(featureValue); + vectorBuilder.addStatuses( + getFeatureStatus(featureValue, checkOutsideMaxAge(feature, now, maxAge))); + vectorBuilder.addEventTimestamps(feature.getEventTimestamp()); + } + + populateCountMetrics(featureReference, vectorBuilder); + } + + responseBuilder.setMetadata( + ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder() + .setFeatureNames( + ServingAPIProto.FeatureList.newBuilder() + .addAllVal( + retrievedFeatureReferences.stream() + .map(Feature::getFeatureReference) + .collect(Collectors.toList())))); + + if (postProcessingSpan != null) { + postProcessingSpan.finish(); + } + + if (!onDemandFeatureReferences.isEmpty()) { + // Handle ODFVs. For each ODFV reference, we send a TransformFeaturesRequest to the FTS. 
+ // The request should contain the entity data, the retrieved features, and the request context + // data. + this.populateOnDemandFeatures( + onDemandFeatureReferences, + onDemandFeatureSources, + retrievedFeatureReferences, + request, + features, + responseBuilder); + } + + populateHistogramMetrics(entityRows, retrievedFeatureReferences); + populateFeatureCountMetrics(retrievedFeatureReferences); + + return responseBuilder.build(); + } + + private List getFeaturesList( + ServingAPIProto.GetOnlineFeaturesRequest request) { + if (request.getFeatures().getValCount() > 0) { + return request.getFeatures().getValList().stream() + .map(Feature::parseFeatureReference) + .collect(Collectors.toList()); + } + + FeatureServiceProto.FeatureServiceSpec featureServiceSpec = + this.registryRepository.getFeatureServiceSpec(request.getFeatureService()); + + return featureServiceSpec.getFeaturesList().stream() + .flatMap( + featureViewProjection -> + featureViewProjection.getFeatureColumnsList().stream() + .map( + f -> + FeatureReferenceV2.newBuilder() + .setFeatureViewName(featureViewProjection.getFeatureViewName()) + .setFeatureName(f.getName()) + .build())) + .collect(Collectors.toList()); + } + + private List> getEntityRows( + ServingAPIProto.GetOnlineFeaturesRequest request) { + if (request.getEntitiesCount() == 0) { + throw new RuntimeException("Entities map shouldn't be empty"); + } + + Set entityNames = request.getEntitiesMap().keySet(); + String firstEntity = entityNames.stream().findFirst().get(); + int rowsCount = request.getEntitiesMap().get(firstEntity).getValCount(); + List> entityRows = Lists.newArrayListWithExpectedSize(rowsCount); + + for (Map.Entry entity : request.getEntitiesMap().entrySet()) { + for (int i = 0; i < rowsCount; i++) { + if (entityRows.size() < i + 1) { + entityRows.add(i, Maps.newHashMapWithExpectedSize(entityNames.size())); + } + + entityRows.get(i).put(entity.getKey(), entity.getValue().getVal(i)); + } + } + + return entityRows; + } + + private void populateOnDemandFeatures( + List onDemandFeatureReferences, + List onDemandFeatureSources, + List retrievedFeatureReferences, + ServingAPIProto.GetOnlineFeaturesRequest request, + List> features, + ServingAPIProto.GetOnlineFeaturesResponse.Builder responseBuilder) { + + List>> onDemandContext = + request.getRequestContextMap().entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue().getValList())) + .collect(Collectors.toList()); + + for (int featureIdx = 0; featureIdx < retrievedFeatureReferences.size(); featureIdx++) { + FeatureReferenceV2 featureReference = retrievedFeatureReferences.get(featureIdx); + + if (!onDemandFeatureSources.contains(featureReference)) { + continue; + } + + ValueProto.ValueType.Enum valueType = + this.registryRepository.getFeatureSpec(featureReference).getValueType(); + + List valueList = Lists.newArrayListWithExpectedSize(features.size()); + for (int rowIdx = 0; rowIdx < features.size(); rowIdx++) { + valueList.add(features.get(rowIdx).get(featureIdx).getFeatureValue(valueType)); + } + + onDemandContext.add( + Pair.of( + String.format( + "%s__%s", + featureReference.getFeatureViewName(), featureReference.getFeatureName()), + valueList)); + } + // Serialize the augmented values. + ValueType transformationInput = + this.onlineTransformationService.serializeValuesIntoArrowIPC(onDemandContext); + + // Send out requests to the FTS and process the responses. 
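+ // One TransformFeaturesRequest is sent per requested ODFV; each response is appended
+ // to responseBuilder by processTransformFeaturesResponse.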
+ Set<String> onDemandFeatureStringReferences =
+ onDemandFeatureReferences.stream()
+ .map(r -> Feature.getFeatureReference(r))
+ .collect(Collectors.toSet());
+
+ for (FeatureReferenceV2 featureReference : onDemandFeatureReferences) {
+ String onDemandFeatureViewName = featureReference.getFeatureViewName();
+ TransformFeaturesRequest transformFeaturesRequest =
+ TransformFeaturesRequest.newBuilder()
+ .setOnDemandFeatureViewName(onDemandFeatureViewName)
+ .setTransformationInput(transformationInput)
+ .build();
+
+ TransformFeaturesResponse transformFeaturesResponse =
+ this.onlineTransformationService.transformFeatures(transformFeaturesRequest);
+
+ this.onlineTransformationService.processTransformFeaturesResponse(
+ transformFeaturesResponse,
+ onDemandFeatureViewName,
+ onDemandFeatureStringReferences,
+ responseBuilder);
+ }
+ }
+ /**
+ * Generate the field-level status for a single feature value.
+ *
+ * @param value value to generate the status for.
+ * @param isOutsideMaxAge whether the value's age is outside the feature view's max age.
+ * @return the {@link FieldStatus} for the given value.
+ */
+ private static FieldStatus getFeatureStatus(ValueProto.Value value, boolean isOutsideMaxAge) {
+
+ if (value == null) {
+ return FieldStatus.NOT_FOUND;
+ } else if (isOutsideMaxAge) {
+ return FieldStatus.OUTSIDE_MAX_AGE;
+ } else if (value.getValCase().equals(ValueProto.Value.ValCase.VAL_NOT_SET)) {
+ return FieldStatus.NULL_VALUE;
+ }
+ return FieldStatus.PRESENT;
+ }
+
+ /**
+ * Determine whether the feature data in the given feature row is outside maxAge. Data is outside
+ * maxAge when the difference between the ingestion time set in the feature row and the retrieval
+ * time set in the entity row exceeds the feature view's max age.
+ *
+ * @param feature contains the ingestion timing and feature data.
+ * @param entityTimestamp contains the retrieval time at which features are pulled.
+ * @param maxAge feature's max age.
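+ * @return true if the difference between the entity row timestamp (or the current time, if
+ *     unset) and the feature's event timestamp exceeds maxAge.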
+ */ + private static boolean checkOutsideMaxAge( + feast.storage.api.retriever.Feature feature, Timestamp entityTimestamp, Duration maxAge) { + + if (maxAge.equals(Duration.getDefaultInstance())) { // max age is not set + return false; + } + + long givenTimestamp = entityTimestamp.getSeconds(); + if (givenTimestamp == 0) { + givenTimestamp = System.currentTimeMillis() / 1000; + } + long timeDifference = givenTimestamp - feature.getEventTimestamp().getSeconds(); + return timeDifference > maxAge.getSeconds(); + } + + /** + * Populate histogram metrics that can be used for analysing online retrieval calls + * + * @param entityRows entity rows provided in request + * @param featureReferences feature references provided in request + */ + private void populateHistogramMetrics( + List> entityRows, List featureReferences) { + Metrics.requestEntityCountDistribution + .labels(this.project) + .observe(Double.valueOf(entityRows.size())); + Metrics.requestFeatureCountDistribution + .labels(this.project) + .observe(Double.valueOf(featureReferences.size())); + } + + /** + * Populate count metrics that can be used for analysing online retrieval calls + * + * @param featureRef singe Feature Reference + * @param featureVector Feature Vector built for this requested feature + */ + private void populateCountMetrics( + FeatureReferenceV2 featureRef, + ServingAPIProto.GetOnlineFeaturesResponse.FeatureVectorOrBuilder featureVector) { + String featureRefString = Feature.getFeatureReference(featureRef); + featureVector + .getStatusesList() + .forEach( + (status) -> { + if (status == FieldStatus.NOT_FOUND) { + Metrics.notFoundKeyCount.labels(this.project, featureRefString).inc(); + } + if (status == FieldStatus.OUTSIDE_MAX_AGE) { + Metrics.staleKeyCount.labels(this.project, featureRefString).inc(); + } + }); + } + + private void populateFeatureCountMetrics(List featureReferences) { + featureReferences.forEach( + featureReference -> + Metrics.requestFeatureCount + .labels(project, Feature.getFeatureReference(featureReference)) + .inc()); + } +} diff --git a/java/serving/src/main/java/feast/serving/service/OnlineTransformationService.java b/java/serving/src/main/java/feast/serving/service/OnlineTransformationService.java new file mode 100644 index 0000000000..365432b84e --- /dev/null +++ b/java/serving/src/main/java/feast/serving/service/OnlineTransformationService.java @@ -0,0 +1,328 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.service; + +import com.google.common.collect.Lists; +import com.google.protobuf.ByteString; +import com.google.protobuf.Timestamp; +import feast.proto.core.*; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesRequest; +import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesResponse; +import feast.proto.serving.TransformationServiceAPIProto.ValueType; +import feast.proto.serving.TransformationServiceGrpc; +import feast.proto.types.ValueProto; +import feast.serving.registry.RegistryRepository; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.Status; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.channels.Channels; +import java.util.*; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.ipc.ArrowFileReader; +import org.apache.arrow.vector.ipc.ArrowFileWriter; +import org.apache.arrow.vector.types.FloatingPointPrecision; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.Schema; +import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; + +public class OnlineTransformationService implements TransformationService { + + private static final Logger log = + org.slf4j.LoggerFactory.getLogger(OnlineTransformationService.class); + private final TransformationServiceGrpc.TransformationServiceBlockingStub stub; + private final RegistryRepository registryRepository; + static final int INT64_BITWIDTH = 64; + static final int INT32_BITWIDTH = 32; + + public OnlineTransformationService( + String transformationServiceEndpoint, RegistryRepository registryRepository) { + if (transformationServiceEndpoint != null) { + final ManagedChannel channel = + ManagedChannelBuilder.forTarget(transformationServiceEndpoint).usePlaintext().build(); + this.stub = TransformationServiceGrpc.newBlockingStub(channel); + } else { + this.stub = null; + } + this.registryRepository = registryRepository; + } + + /** {@inheritDoc} */ + @Override + public TransformFeaturesResponse transformFeatures( + TransformFeaturesRequest transformFeaturesRequest) { + if (this.stub == null) { + throw new RuntimeException( + "Transformation service endpoint must be configured to enable this functionality."); + } + return this.stub.transformFeatures(transformFeaturesRequest); + } + + /** {@inheritDoc} */ + @Override + public List extractOnDemandFeaturesDependencies( + List onDemandFeatureReferences) { + List onDemandFeatureSources = new ArrayList<>(); + for (ServingAPIProto.FeatureReferenceV2 featureReference : onDemandFeatureReferences) { + OnDemandFeatureViewProto.OnDemandFeatureViewSpec onDemandFeatureViewSpec = + this.registryRepository.getOnDemandFeatureViewSpec(featureReference); + Map sources = + onDemandFeatureViewSpec.getSourcesMap(); + + for (OnDemandFeatureViewProto.OnDemandSource source : sources.values()) { + OnDemandFeatureViewProto.OnDemandSource.SourceCase sourceCase = source.getSourceCase(); + switch (sourceCase) { + case REQUEST_DATA_SOURCE: + // Do nothing. 
The value should be provided as dedicated request parameter + break; + case FEATURE_VIEW_PROJECTION: + FeatureReferenceProto.FeatureViewProjection projection = + source.getFeatureViewProjection(); + for (FeatureProto.FeatureSpecV2 featureSpec : projection.getFeatureColumnsList()) { + String featureName = featureSpec.getName(); + ServingAPIProto.FeatureReferenceV2 onDemandFeatureSource = + ServingAPIProto.FeatureReferenceV2.newBuilder() + .setFeatureViewName(projection.getFeatureViewName()) + .setFeatureName(featureName) + .build(); + onDemandFeatureSources.add(onDemandFeatureSource); + } + break; + case FEATURE_VIEW: + FeatureViewProto.FeatureView featureView = source.getFeatureView(); + FeatureViewProto.FeatureViewSpec featureViewSpec = featureView.getSpec(); + String featureViewName = featureViewSpec.getName(); + for (FeatureProto.FeatureSpecV2 featureSpec : featureViewSpec.getFeaturesList()) { + String featureName = featureSpec.getName(); + ServingAPIProto.FeatureReferenceV2 onDemandFeatureSource = + ServingAPIProto.FeatureReferenceV2.newBuilder() + .setFeatureViewName(featureViewName) + .setFeatureName(featureName) + .build(); + onDemandFeatureSources.add(onDemandFeatureSource); + } + break; + default: + throw Status.INTERNAL + .withDescription( + "OnDemandSource proto source field has an unexpected type: " + sourceCase) + .asRuntimeException(); + } + } + } + return onDemandFeatureSources; + } + + /** {@inheritDoc} */ + public void processTransformFeaturesResponse( + feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesResponse + transformFeaturesResponse, + String onDemandFeatureViewName, + Set onDemandFeatureStringReferences, + ServingAPIProto.GetOnlineFeaturesResponse.Builder responseBuilder) { + try { + BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + ArrowFileReader reader = + new ArrowFileReader( + new ByteArrayReadableSeekableByteChannel( + transformFeaturesResponse + .getTransformationOutput() + .getArrowValue() + .toByteArray()), + allocator); + reader.loadNextBatch(); + VectorSchemaRoot readBatch = reader.getVectorSchemaRoot(); + Schema responseSchema = readBatch.getSchema(); + List responseFields = responseSchema.getFields(); + Timestamp now = Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000).build(); + + for (Field field : responseFields) { + String columnName = field.getName(); + String fullFeatureName = columnName.replace("__", ":"); + ArrowType columnType = field.getType(); + + // The response will contain all features for the specified ODFV, so we + // skip the features that were not requested. 
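+ // (fullFeatureName is the Arrow column name with "__" mapped back to the ":" separator
+ // used by feature references.)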
+ if (!onDemandFeatureStringReferences.contains(fullFeatureName)) { + continue; + } + + FieldVector fieldVector = readBatch.getVector(field); + int valueCount = fieldVector.getValueCount(); + ServingAPIProto.GetOnlineFeaturesResponse.FeatureVector.Builder vectorBuilder = + responseBuilder.addResultsBuilder(); + List valueList = Lists.newArrayListWithExpectedSize(valueCount); + + // TODO: support all Feast types + // TODO: clean up the switch statement + if (columnType instanceof ArrowType.Int) { + int bitWidth = ((ArrowType.Int) columnType).getBitWidth(); + switch (bitWidth) { + case INT64_BITWIDTH: + for (int i = 0; i < valueCount; i++) { + long int64Value = ((BigIntVector) fieldVector).get(i); + valueList.add(ValueProto.Value.newBuilder().setInt64Val(int64Value).build()); + } + break; + case INT32_BITWIDTH: + for (int i = 0; i < valueCount; i++) { + int int32Value = ((IntVector) fieldVector).get(i); + valueList.add(ValueProto.Value.newBuilder().setInt32Val(int32Value).build()); + } + break; + default: + throw Status.INTERNAL + .withDescription( + "Column " + + columnName + + " is of type ArrowType.Int but has bitWidth " + + bitWidth + + " which cannot be handled.") + .asRuntimeException(); + } + } else if (columnType instanceof ArrowType.FloatingPoint) { + FloatingPointPrecision precision = ((ArrowType.FloatingPoint) columnType).getPrecision(); + switch (precision) { + case DOUBLE: + for (int i = 0; i < valueCount; i++) { + double doubleValue = ((Float8Vector) fieldVector).get(i); + valueList.add(ValueProto.Value.newBuilder().setDoubleVal(doubleValue).build()); + } + break; + case SINGLE: + for (int i = 0; i < valueCount; i++) { + float floatValue = ((Float4Vector) fieldVector).get(i); + valueList.add(ValueProto.Value.newBuilder().setFloatVal(floatValue).build()); + } + break; + default: + throw Status.INTERNAL + .withDescription( + "Column " + + columnName + + " is of type ArrowType.FloatingPoint but has precision " + + precision + + " which cannot be handled.") + .asRuntimeException(); + } + } + + for (ValueProto.Value v : valueList) { + vectorBuilder.addValues(v); + vectorBuilder.addStatuses(ServingAPIProto.FieldStatus.PRESENT); + vectorBuilder.addEventTimestamps(now); + } + + responseBuilder.getMetadataBuilder().getFeatureNamesBuilder().addVal(fullFeatureName); + } + } catch (IOException e) { + log.info(e.toString()); + throw Status.INTERNAL + .withDescription( + "Unable to correctly process transform features response: " + e.toString()) + .asRuntimeException(); + } + } + + /** {@inheritDoc} */ + public ValueType serializeValuesIntoArrowIPC(List>> values) { + // In order to be serialized correctly, the data must be packaged in a VectorSchemaRoot. + // We first construct all the columns. + Map columnNameToColumn = new HashMap(); + BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + + List columnFields = new ArrayList(); + List columns = new ArrayList(); + + for (Pair> columnEntry : values) { + // The Python FTS does not expect full feature names, so we extract the feature name. 
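+ // Request-context keys are used as-is; feature keys were already built upstream in the
+ // "view__feature" form that the transformation server expects.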
+ String columnName = columnEntry.getKey(); + + List columnValues = columnEntry.getValue(); + FieldVector column; + ValueProto.Value.ValCase valCase = columnValues.get(0).getValCase(); + // TODO: support all Feast types + switch (valCase) { + case INT32_VAL: + column = new IntVector(columnName, allocator); + column.setValueCount(columnValues.size()); + for (int idx = 0; idx < columnValues.size(); idx++) { + ((IntVector) column).set(idx, columnValues.get(idx).getInt32Val()); + } + break; + case INT64_VAL: + column = new BigIntVector(columnName, allocator); + column.setValueCount(columnValues.size()); + for (int idx = 0; idx < columnValues.size(); idx++) { + ((BigIntVector) column).set(idx, columnValues.get(idx).getInt64Val()); + } + + break; + case DOUBLE_VAL: + column = new Float8Vector(columnName, allocator); + column.setValueCount(columnValues.size()); + for (int idx = 0; idx < columnValues.size(); idx++) { + ((Float8Vector) column).set(idx, columnValues.get(idx).getDoubleVal()); + } + break; + case FLOAT_VAL: + column = new Float4Vector(columnName, allocator); + column.setValueCount(columnValues.size()); + for (int idx = 0; idx < columnValues.size(); idx++) { + ((Float4Vector) column).set(idx, columnValues.get(idx).getFloatVal()); + } + break; + default: + throw Status.INTERNAL + .withDescription( + "Column " + columnName + " has a type that is currently not handled: " + valCase) + .asRuntimeException(); + } + + columns.add(column); + columnFields.add(column.getField()); + } + + VectorSchemaRoot schemaRoot = new VectorSchemaRoot(columnFields, columns); + + // Serialize the VectorSchemaRoot into Arrow IPC format. + ByteArrayOutputStream out = new ByteArrayOutputStream(); + ArrowFileWriter writer = new ArrowFileWriter(schemaRoot, null, Channels.newChannel(out)); + try { + writer.start(); + writer.writeBatch(); + writer.end(); + } catch (IOException e) { + log.info(e.toString()); + throw Status.INTERNAL + .withDescription( + "ArrowFileWriter could not write properly; failed with error: " + e.toString()) + .asRuntimeException(); + } + byte[] byteData = out.toByteArray(); + ByteString sourceData = ByteString.copyFrom(byteData); + ValueType transformationInput = ValueType.newBuilder().setArrowValue(sourceData).build(); + return transformationInput; + } +} diff --git a/java/serving/src/main/java/feast/serving/service/ServingServiceV2.java b/java/serving/src/main/java/feast/serving/service/ServingServiceV2.java new file mode 100644 index 0000000000..4a44f4e09e --- /dev/null +++ b/java/serving/src/main/java/feast/serving/service/ServingServiceV2.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.service; + +import feast.proto.serving.ServingAPIProto; + +public interface ServingServiceV2 { + /** + * Get information about the Feast serving deployment. + * + *
+ * <p>For Bigquery deployments, this includes the default job staging location to load
+ * intermediate files to. Otherwise, this method only returns the current Feast Serving backing
+ * store type.
+ *
+ * @param getFeastServingInfoRequest {@link ServingAPIProto.GetFeastServingInfoRequest}
+ * @return {@link ServingAPIProto.GetFeastServingInfoResponse}
+ */
+ ServingAPIProto.GetFeastServingInfoResponse getFeastServingInfo(
+ ServingAPIProto.GetFeastServingInfoRequest getFeastServingInfoRequest);
+
+ /**
+ * Get features from an online serving store, given a list of {@link
+ * feast.proto.serving.ServingAPIProto.FeatureReferenceV2}s to retrieve or the name of a feature
+ * service, and vectorized entities Map<String, {@link
+ * feast.proto.types.ValueProto.RepeatedValue}> to join the retrieved values to.
+ *
+ * <p>This request is fulfilled synchronously.
+ *
+ * @return {@link feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse} with a list of
+ * {@link feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse.FeatureVector}.
+ */
+ ServingAPIProto.GetOnlineFeaturesResponse getOnlineFeatures(
+ ServingAPIProto.GetOnlineFeaturesRequest getFeaturesRequest);
+}
diff --git a/java/serving/src/main/java/feast/serving/service/TransformationService.java b/java/serving/src/main/java/feast/serving/service/TransformationService.java
new file mode 100644
index 0000000000..3d035f4f56
--- /dev/null
+++ b/java/serving/src/main/java/feast/serving/service/TransformationService.java
@@ -0,0 +1,68 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.serving.service;
+
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesRequest;
+import feast.proto.serving.TransformationServiceAPIProto.TransformFeaturesResponse;
+import feast.proto.serving.TransformationServiceAPIProto.ValueType;
+import feast.proto.types.ValueProto;
+import java.util.List;
+import java.util.Set;
+import org.apache.commons.lang3.tuple.Pair;
+
+public interface TransformationService {
+ /**
+ * Apply on demand transformations for the specified ODFVs.
+ *
+ * @param transformFeaturesRequest proto containing the ODFV references and necessary data
+ * @return a proto object containing the response
+ */
+ TransformFeaturesResponse transformFeatures(TransformFeaturesRequest transformFeaturesRequest);
+
+ /**
+ * Extract the list of on demand feature sources from a list of ODFV references.
+ *
+ * @param onDemandFeatureReferences list of ODFV references to be parsed
+ * @return list of on demand feature sources
+ */
+ List<ServingAPIProto.FeatureReferenceV2> extractOnDemandFeaturesDependencies(
+ List<ServingAPIProto.FeatureReferenceV2> onDemandFeatureReferences);
+
+ /**
+ * Process a response from the feature transformation server by appending the transformed feature
+ * vectors for the requested ODFV features to the response builder.
+ *
+ * @param transformFeaturesResponse response to be processed
+ * @param onDemandFeatureViewName name of the ODFV to which the response corresponds
+ * @param onDemandFeatureStringReferences set of all ODFV references that should be kept
+ * @param responseBuilder {@link ServingAPIProto.GetOnlineFeaturesResponse.Builder}
+ */
+ void processTransformFeaturesResponse(
+ TransformFeaturesResponse transformFeaturesResponse,
+ String onDemandFeatureViewName,
+ Set<String> onDemandFeatureStringReferences,
+ ServingAPIProto.GetOnlineFeaturesResponse.Builder responseBuilder);
+
+ /**
+ * Serialize data into Arrow IPC format, to be sent to the Python feature transformation server.
+ * + * @param values list of field maps to be serialized + * @return the data packaged into a ValueType proto object + */ + ValueType serializeValuesIntoArrowIPC(List>> values); +} diff --git a/java/serving/src/main/java/feast/serving/util/Metrics.java b/java/serving/src/main/java/feast/serving/util/Metrics.java new file mode 100644 index 0000000000..dca2b5e82a --- /dev/null +++ b/java/serving/src/main/java/feast/serving/util/Metrics.java @@ -0,0 +1,90 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.util; + +import io.prometheus.client.Counter; +import io.prometheus.client.Histogram; + +public class Metrics { + + public static final Histogram requestLatency = + Histogram.build() + .name("request_latency_seconds") + .subsystem("feast_serving") + .help("Request latency in seconds") + .labelNames("method", "project") + .register(); + + public static final Histogram requestEntityCountDistribution = + Histogram.build() + .buckets(1, 2, 5, 10, 20, 50, 100, 200) + .name("request_entity_count_distribution") + .subsystem("feast_serving") + .help("Number of entity rows per request") + .labelNames("project") + .register(); + + public static final Histogram requestFeatureCountDistribution = + Histogram.build() + .buckets(1, 2, 5, 10, 15, 20, 30, 50) + .name("request_feature_count_distribution") + .subsystem("feast_serving") + .help("Number of feature rows per request") + .labelNames("project") + .register(); + + public static final Histogram requestFeatureTableCountDistribution = + Histogram.build() + .buckets(1, 2, 5, 10, 20) + .name("request_feature_table_count_distribution") + .subsystem("feast_serving") + .help("Number of feature tables per request") + .labelNames("project") + .register(); + + public static final Counter requestFeatureCount = + Counter.build() + .name("request_feature_count") + .subsystem("feast_serving") + .help("number of feature rows requested") + .labelNames("project", "feature_name") + .register(); + + public static final Counter notFoundKeyCount = + Counter.build() + .name("not_found_feature_count") + .subsystem("feast_serving") + .help("number requested feature rows that were not found") + .labelNames("project", "feature_name") + .register(); + + public static final Counter staleKeyCount = + Counter.build() + .name("stale_feature_count") + .subsystem("feast_serving") + .help("number requested feature rows that were stale") + .labelNames("project", "feature_name") + .register(); + + public static final Counter grpcRequestCount = + Counter.build() + .name("grpc_request_count") + .subsystem("feast_serving") + .help("number of grpc requests served") + .labelNames("method", "status_code") + .register(); +} diff --git a/java/serving/src/main/java/feast/serving/util/RequestHelper.java b/java/serving/src/main/java/feast/serving/util/RequestHelper.java new file mode 100644 index 0000000000..f730e01982 --- /dev/null +++ 
b/java/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -0,0 +1,44 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.util; + +import feast.common.models.Feature; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.FeatureReferenceV2; + +public class RequestHelper { + + public static void validateOnlineRequest(ServingAPIProto.GetOnlineFeaturesRequest request) { + // All EntityRows should not be empty + if (request.getEntitiesCount() <= 0) { + throw new IllegalArgumentException("Entity value must be provided"); + } + // All FeatureReferences should have FeatureTable name and Feature name + for (String featureReference : request.getFeatures().getValList()) { + validateOnlineRequestFeatureReference(Feature.parseFeatureReference(featureReference)); + } + } + + public static void validateOnlineRequestFeatureReference(FeatureReferenceV2 featureReference) { + if (featureReference.getFeatureViewName().isEmpty()) { + throw new IllegalArgumentException("FeatureTable name must be provided in FeatureReference"); + } + if (featureReference.getFeatureName().isEmpty()) { + throw new IllegalArgumentException("Feature name must be provided in FeatureReference"); + } + } +} diff --git a/java/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java b/java/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java new file mode 100644 index 0000000000..3ab9f43c34 --- /dev/null +++ b/java/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package feast.serving.util.mappers;
+
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto.Value;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+// ResponseJSONMapper maps gRPC response types to more human-readable JSON responses
+public class ResponseJSONMapper {
+
+ public static List<Map<String, Object>> mapGetOnlineFeaturesResponse(
+ ServingAPIProto.GetOnlineFeaturesResponse response) {
+ return response.getResultsList().stream()
+ .map(fieldValues -> convertFieldValuesToMap(fieldValues))
+ .collect(Collectors.toList());
+ }
+
+ private static Map<String, Object> convertFieldValuesToMap(
+ ServingAPIProto.GetOnlineFeaturesResponse.FeatureVector vec) {
+ return Map.of(
+ "values",
+ vec.getValuesList().stream()
+ .map(ResponseJSONMapper::extractValue)
+ .collect(Collectors.toList()),
+ "statuses",
+ vec.getStatusesList(),
+ "event_timestamp",
+ vec.getEventTimestampsList());
+ }
+
+ private static Object extractValue(Value value) {
+ // Case numbers correspond to the field numbers of the ValueProto.Value oneof.
+ switch (value.getValCase().getNumber()) {
+ case 1:
+ return value.getBytesVal();
+ case 2:
+ return value.getStringVal();
+ case 3:
+ return value.getInt32Val();
+ case 4:
+ return value.getInt64Val();
+ case 5:
+ return value.getDoubleVal();
+ case 6:
+ return value.getFloatVal();
+ case 7:
+ return value.getBoolVal();
+ case 11:
+ return value.getBytesListVal();
+ case 12:
+ return value.getStringListVal();
+ case 13:
+ return value.getInt32ListVal();
+ case 14:
+ return value.getInt64ListVal();
+ case 15:
+ return value.getDoubleListVal();
+ case 16:
+ return value.getFloatListVal();
+ case 17:
+ return value.getBoolListVal();
+ default:
+ return null;
+ }
+ }
+}
diff --git a/java/serving/src/main/resources/application.yml b/java/serving/src/main/resources/application.yml
new file mode 100644
index 0000000000..1f6d5b34c4
--- /dev/null
+++ b/java/serving/src/main/resources/application.yml
@@ -0,0 +1,65 @@
+feast:
+  project: ""
+  registry: "prompt_dory/data/registry.db"
+  registryRefreshInterval: 0
+
+  # Indicates the active store. Only a single store in the list can be active at one time. In the future this key
+  # will be deprecated in order to allow multiple stores to be served from a single serving instance
+  activeStore: online
+
+  # List of store configurations
+  stores:
+    # Please see https://api.docs.feast.dev/grpc/feast.core.pb.html#Store for configuration options
+    - name: online # Name of the store (referenced by active_store)
+      type: REDIS # Type of the store. REDIS, REDIS_CLUSTER are available options
+      config: # Store specific configuration. See
+        host: localhost
+        port: 6379
+      # Subscriptions indicate which feature sets need to be retrieved and used to populate this store
+    - name: online_cluster
+      type: REDIS_CLUSTER
+      config: # Store specific configuration.
+        # Connection string specifies the host:port of Redis instances in the redis cluster.
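+        # Multiple seed nodes can be listed as comma-separated host:port pairs; the client
+        # discovers the rest of the cluster topology from these seeds.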
+ connection_string: "localhost:7000,localhost:7001,localhost:7002,localhost:7003,localhost:7004,localhost:7005" + read_from: MASTER + # Redis operation timeout in ISO-8601 format + timeout: PT0.5S + tracing: + # If true, Feast will provide tracing data (using OpenTracing API) for various RPC method calls + # which can be useful to debug performance issues and perform benchmarking + enabled: false + # Only Jaeger tracer is supported currently + # https://opentracing.io/docs/supported-tracers/ + tracerName: jaeger + # The service name identifier for the tracing data + serviceName: feast_serving + + logging: + # Audit logging provides a machine readable structured JSON log that can give better + # insight into what is happening in Feast. + audit: + # Whether audit logging is enabled. + enabled: true + # Whether to enable message level (ie request/response) audit logging + messageLogging: + enabled: false + # Logging forwarder currently provides a machine readable structured JSON log to an + # external fluentd service that can give better insight into what is happening in Feast. + # Accepts console / fluentd as destination + destination: console + fluentdHost: localhost + fluentdPort: 24224 + +grpc: + server: + # The port number Feast Serving GRPC service should listen on + # It is set default to 6566 so it does not conflict with the GRPC server on Feast Core + # which defaults to port 6565 + port: 6566 + +rest: + server: + # The port number on which the Tomcat webserver that serves REST API endpoints should listen + # It is set by default to 8081 so it does not conflict with Tomcat webserver on Feast Core + # if both Feast Core and Serving are running on the same machine + port: 8081 diff --git a/java/serving/src/main/resources/banner.txt b/java/serving/src/main/resources/banner.txt new file mode 100644 index 0000000000..44aa9c4cad --- /dev/null +++ b/java/serving/src/main/resources/banner.txt @@ -0,0 +1,14 @@ + +███████╗███████╗ █████╗ ███████╗████████╗ +██╔════╝██╔════╝██╔══██╗██╔════╝╚══██╔══╝ +█████╗ █████╗ ███████║███████╗ ██║ +██╔══╝ ██╔══╝ ██╔══██║╚════██║ ██║ +██║ ███████╗██║ ██║███████║ ██║ +╚═╝ ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═╝ + +███████╗███████╗██████╗ ██╗ ██╗██╗███╗ ██╗ ██████╗ +██╔════╝██╔════╝██╔══██╗██║ ██║██║████╗ ██║██╔════╝ +███████╗█████╗ ██████╔╝██║ ██║██║██╔██╗ ██║██║ ███╗ +╚════██║██╔══╝ ██╔══██╗╚██╗ ██╔╝██║██║╚██╗██║██║ ██║ +███████║███████╗██║ ██║ ╚████╔╝ ██║██║ ╚████║╚██████╔╝ +╚══════╝╚══════╝╚═╝ ╚═╝ ╚═══╝ ╚═╝╚═╝ ╚═══╝ ╚═════╝ diff --git a/java/serving/src/main/resources/log4j2.xml b/java/serving/src/main/resources/log4j2.xml new file mode 100644 index 0000000000..c75c2db13c --- /dev/null +++ b/java/serving/src/main/resources/log4j2.xml @@ -0,0 +1,48 @@ + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} %5p ${hostName} --- [%15.15t] %-40.40c{1.} : %m%n%ex + + + {"time":"%d{yyyy-MM-dd'T'HH:mm:ssXXX}","hostname":"${hostName}","severity":"%p","message":%m}%n%ex + + + + + + + + + + + + + + + + + + + + + + + diff --git a/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java b/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java new file mode 100644 index 0000000000..c610d7df6b --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingBaseTests.java @@ -0,0 +1,184 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.it; + +import static org.awaitility.Awaitility.await; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.*; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import feast.proto.core.FeatureProto; +import feast.proto.core.FeatureViewProto; +import feast.proto.core.RegistryProto; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.FieldStatus; +import feast.proto.types.ValueProto; +import feast.serving.util.DataGenerator; +import io.grpc.StatusRuntimeException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.*; + +abstract class ServingBaseTests extends ServingEnvironment { + + protected ServingAPIProto.GetOnlineFeaturesRequest buildOnlineRequest(int driverId) { + // getOnlineFeatures Information + String entityName = "driver_id"; + + // Instantiate EntityRows + Map entityRows = + ImmutableMap.of( + entityName, + ValueProto.RepeatedValue.newBuilder() + .addVal(DataGenerator.createInt64Value(driverId)) + .build()); + + ImmutableList featureReferences = + ImmutableList.of("driver_hourly_stats:conv_rate", "driver_hourly_stats:avg_daily_trips"); + + // Build GetOnlineFeaturesRequestV2 + return TestUtils.createOnlineFeatureRequest(featureReferences, entityRows); + } + + static RegistryProto.Registry registryProto = readLocalRegistry(); + + private static RegistryProto.Registry readLocalRegistry() { + try { + return RegistryProto.Registry.parseFrom( + Files.readAllBytes(Paths.get("src/test/resources/docker-compose/feast10/registry.db"))); + } catch (IOException e) { + e.printStackTrace(); + } + + return null; + } + + @Test + public void shouldGetOnlineFeatures() { + ServingAPIProto.GetOnlineFeaturesResponse featureResponse = + servingStub.getOnlineFeatures(buildOnlineRequest(1005)); + + assertEquals(2, featureResponse.getResultsCount()); + assertEquals(1, featureResponse.getResults(0).getValuesCount()); + + assertEquals( + ImmutableList.of("driver_hourly_stats:conv_rate", "driver_hourly_stats:avg_daily_trips"), + featureResponse.getMetadata().getFeatureNames().getValList()); + + for (int featureIdx : List.of(0, 1)) { + assertEquals( + List.of(ServingAPIProto.FieldStatus.PRESENT), + featureResponse.getResults(featureIdx).getStatusesList()); + } + + assertEquals(0.5, featureResponse.getResults(0).getValues(0).getDoubleVal(), 0.0001); + assertEquals(500, featureResponse.getResults(1).getValues(0).getInt64Val()); + } + + @Test + public void shouldGetOnlineFeaturesWithOutsideMaxAgeStatus() { + ServingAPIProto.GetOnlineFeaturesResponse featureResponse = + servingStub.getOnlineFeatures(buildOnlineRequest(1001)); + + assertEquals(2, featureResponse.getResultsCount()); + assertEquals(1, featureResponse.getResults(0).getValuesCount()); + + for (int featureIdx : List.of(0, 1)) { + assertEquals( + FieldStatus.OUTSIDE_MAX_AGE, featureResponse.getResults(featureIdx).getStatuses(0)); + } 
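+ // The stale values themselves are still returned alongside the OUTSIDE_MAX_AGE status.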
+ + assertEquals(0.1, featureResponse.getResults(0).getValues(0).getDoubleVal(), 0.0001); + assertEquals(100, featureResponse.getResults(1).getValues(0).getInt64Val()); + } + + @Test + public void shouldGetOnlineFeaturesWithNotFoundStatus() { + ServingAPIProto.GetOnlineFeaturesResponse featureResponse = + servingStub.getOnlineFeatures(buildOnlineRequest(-1)); + + assertEquals(2, featureResponse.getResultsCount()); + assertEquals(1, featureResponse.getResults(0).getValuesCount()); + + for (final int featureIdx : List.of(0, 1)) { + assertEquals(FieldStatus.NOT_FOUND, featureResponse.getResults(featureIdx).getStatuses(0)); + } + } + + @Test + public void shouldRefreshRegistryAndServeNewFeatures() throws InterruptedException { + updateRegistryFile( + registryProto + .toBuilder() + .addFeatureViews( + FeatureViewProto.FeatureView.newBuilder() + .setSpec( + FeatureViewProto.FeatureViewSpec.newBuilder() + .setName("new_view") + .addEntities("driver_id") + .addFeatures( + FeatureProto.FeatureSpecV2.newBuilder() + .setName("new_feature") + .setValueType(ValueProto.ValueType.Enum.BOOL)))) + .build()); + + ServingAPIProto.GetOnlineFeaturesRequest request = buildOnlineRequest(1005); + + ServingAPIProto.GetOnlineFeaturesRequest requestWithNewFeature = + request + .toBuilder() + .setFeatures(request.getFeatures().toBuilder().addVal("new_view:new_feature")) + .build(); + + await() + .ignoreException(StatusRuntimeException.class) + .atMost(5, TimeUnit.SECONDS) + .until( + () -> servingStub.getOnlineFeatures(requestWithNewFeature).getResultsCount(), + equalTo(3)); + } + + /** https://github.com/feast-dev/feast/issues/2253 */ + @Test + public void shouldGetOnlineFeaturesWithStringEntity() { + Map entityRows = + ImmutableMap.of( + "entity", + ValueProto.RepeatedValue.newBuilder() + .addVal(DataGenerator.createStrValue("key-1")) + .build()); + + ImmutableList featureReferences = + ImmutableList.of("feature_view_0:feature_0", "feature_view_0:feature_1"); + + ServingAPIProto.GetOnlineFeaturesRequest req = + TestUtils.createOnlineFeatureRequest(featureReferences, entityRows); + + ServingAPIProto.GetOnlineFeaturesResponse resp = servingStub.getOnlineFeatures(req); + + for (final int featureIdx : List.of(0, 1)) { + assertEquals(FieldStatus.PRESENT, resp.getResults(featureIdx).getStatuses(0)); + } + } + + abstract void updateRegistryFile(RegistryProto.Registry registry); +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingBenchmarkIT.java b/java/serving/src/test/java/feast/serving/it/ServingBenchmarkIT.java new file mode 100644 index 0000000000..1d77c2e4f7 --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingBenchmarkIT.java @@ -0,0 +1,152 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package feast.serving.it;
+
+import com.google.api.client.util.Lists;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.math.Quantiles;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+import feast.serving.config.ApplicationProperties;
+import feast.serving.util.DataGenerator;
+import java.util.List;
+import java.util.LongSummaryStatistics;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ServingBenchmarkIT extends ServingEnvironment {
+  private Random rand = new Random();
+  public static final Logger log = LoggerFactory.getLogger(ServingBenchmarkIT.class);
+
+  private static int WARM_UP_COUNT = 10;
+
+  @Override
+  ApplicationProperties.FeastProperties createFeastProperties() {
+    return TestUtils.createBasicFeastProperties(
+        environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379));
+  }
+
+  protected ServingAPIProto.GetOnlineFeaturesRequest buildOnlineRequest(
+      int rowsCount, int featuresCount) {
+    List<ValueProto.Value> entities =
+        IntStream.range(0, rowsCount)
+            .mapToObj(
+                i -> DataGenerator.createStrValue(String.format("key-%s", rand.nextInt(1000))))
+            .collect(Collectors.toList());
+
+    List<String> featureReferences =
+        IntStream.range(0, featuresCount)
+            .mapToObj(i -> String.format("feature_view_%d:feature_%d", i / 10, i))
+            .collect(Collectors.toList());
+
+    Map<String, ValueProto.RepeatedValue> entityRows =
+        ImmutableMap.of(
+            "entity", ValueProto.RepeatedValue.newBuilder().addAllVal(entities).build());
+
+    return TestUtils.createOnlineFeatureRequest(featureReferences, entityRows);
+  }
+
+  protected ServingAPIProto.GetOnlineFeaturesRequest buildOnlineRequest(int rowsCount) {
+    List<ValueProto.Value> entities =
+        IntStream.range(0, rowsCount)
+            .mapToObj(i -> DataGenerator.createInt64Value(rand.nextInt(1000)))
+            .collect(Collectors.toList());
+
+    Map<String, ValueProto.RepeatedValue> entityRows =
+        ImmutableMap.of(
+            "entity", ValueProto.RepeatedValue.newBuilder().addAllVal(entities).build());
+
+    return TestUtils.createOnlineFeatureRequest("benchmark_feature_service", entityRows);
+  }
+
+  @Test
+  public void benchmarkServing100rows10features() {
+    ServingAPIProto.GetOnlineFeaturesRequest req = buildOnlineRequest(100, 10);
+
+    measure(
+        () -> servingStub.withDeadlineAfter(1, TimeUnit.SECONDS).getOnlineFeatures(req),
+        "100 rows; 10 features",
+        1000);
+  }
+
+  @Test
+  public void benchmarkServing100rows50features() {
+    ServingAPIProto.GetOnlineFeaturesRequest req = buildOnlineRequest(100, 50);
+
+    measure(
+        () -> servingStub.withDeadlineAfter(1, TimeUnit.SECONDS).getOnlineFeatures(req),
+        "100 rows; 50 features",
+        1000);
+  }
+
+  @Test
+  public void benchmarkServing100rows100features() {
+    ServingAPIProto.GetOnlineFeaturesRequest req = buildOnlineRequest(100, 100);
+
+    measure(
+        () -> servingStub.withDeadlineAfter(1, TimeUnit.SECONDS).getOnlineFeatures(req),
+        "100 rows; 100 features",
+        1000);
+  }
+
+  @Test
+  public void benchmarkServing100rowsFullFeatureService() {
+    ServingAPIProto.GetOnlineFeaturesRequest req = buildOnlineRequest(100);
+
+    measure(
+        () -> servingStub.withDeadlineAfter(1, TimeUnit.SECONDS).getOnlineFeatures(req),
+        "100 rows; Full FS",
+        1000);
+  }
+
+  private void measure(Runnable target, String name, int runs) {
+    Stopwatch timer = Stopwatch.createUnstarted();
+
+    List<Long> records = Lists.newArrayList();
+
+    for
(int i = 0; i < runs; i++) { + timer.reset(); + timer.start(); + target.run(); + timer.stop(); + if (i >= WARM_UP_COUNT) { + records.add(timer.elapsed(TimeUnit.MILLISECONDS)); + } + } + + LongSummaryStatistics summary = + records.stream().collect(Collectors.summarizingLong(Long::longValue)); + + log.info(String.format("Test %s took (min): %d ms", name, summary.getMin())); + log.info(String.format("Test %s took (avg): %f ms", name, summary.getAverage())); + log.info( + String.format("Test %s took (median): %f ms", name, Quantiles.median().compute(records))); + log.info( + String.format( + "Test %s took (95p): %f ms", name, Quantiles.percentiles().index(95).compute(records))); + log.info( + String.format( + "Test %s took (99p): %f ms", name, Quantiles.percentiles().index(99).compute(records))); + } +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java b/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java new file mode 100644 index 0000000000..c00dc7b1f3 --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java @@ -0,0 +1,172 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.it; + +import static org.hamcrest.CoreMatchers.*; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; + +import com.google.inject.*; +import com.google.inject.Module; +import com.google.inject.util.Modules; +import feast.proto.serving.ServingServiceGrpc; +import feast.serving.config.*; +import feast.serving.grpc.OnlineServingGrpcServiceV2; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; +import io.grpc.Server; +import io.grpc.util.MutableHandlerRegistry; +import java.io.File; +import java.io.IOException; +import java.net.ServerSocket; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.testcontainers.containers.DockerComposeContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Testcontainers; + +@Testcontainers +abstract class ServingEnvironment { + static DockerComposeContainer environment; + + ServingServiceGrpc.ServingServiceBlockingStub servingStub; + Injector injector; + String serverName; + ManagedChannel channel; + Server server; + MutableHandlerRegistry serviceRegistry; + + static int serverPort = getFreePort(); + + @BeforeAll + static void globalSetup() { + environment = + new DockerComposeContainer( + new File("src/test/resources/docker-compose/docker-compose-redis-it.yml")) + .withExposedService("redis", 6379) + .withExposedService("feast", 8080) + .waitingFor("feast", Wait.forListeningPort()); + environment.start(); + } + + @AfterAll + static void globalTeardown() { + environment.stop(); + } + + @BeforeEach + public void 
envSetUp() throws Exception { + AbstractModule appPropertiesModule = + new AbstractModule() { + @Override + protected void configure() { + bind(OnlineServingGrpcServiceV2.class); + } + + @Provides + ApplicationProperties applicationProperties() { + final ApplicationProperties p = new ApplicationProperties(); + + ApplicationProperties.GrpcServer grpcServer = new ApplicationProperties.GrpcServer(); + ApplicationProperties.Server server = new ApplicationProperties.Server(); + server.setPort(serverPort); + grpcServer.setServer(server); + p.setGrpc(grpcServer); + + final ApplicationProperties.FeastProperties feastProperties = createFeastProperties(); + feastProperties.setAwsRegion("us-east-1"); + p.setFeast(feastProperties); + + final ApplicationProperties.TracingProperties tracingProperties = + new ApplicationProperties.TracingProperties(); + feastProperties.setTracing(tracingProperties); + + tracingProperties.setEnabled(false); + return p; + } + }; + + Module overrideConfig = registryConfig(); + Module registryConfig; + if (overrideConfig != null) { + registryConfig = Modules.override(new RegistryConfig()).with(registryConfig()); + } else { + registryConfig = new RegistryConfig(); + } + + injector = + Guice.createInjector( + new ServingServiceConfigV2(), + registryConfig, + new InstrumentationConfig(), + appPropertiesModule, + new ServerModule()); + + server = injector.getInstance(Server.class); + server.start(); + + channel = ManagedChannelBuilder.forAddress("localhost", serverPort).usePlaintext().build(); + + servingStub = + ServingServiceGrpc.newBlockingStub(channel) + .withDeadlineAfter(5, TimeUnit.SECONDS) + .withWaitForReady(); + } + + @AfterEach + public void envTeardown() throws Exception { + // assume channel and server are not null + channel.shutdown(); + server.shutdown(); + // fail the test if cannot gracefully shutdown + try { + assert channel.awaitTermination(5, TimeUnit.SECONDS) + : "channel cannot be gracefully shutdown"; + assert server.awaitTermination(5, TimeUnit.SECONDS) : "server cannot be gracefully shutdown"; + } finally { + channel.shutdownNow(); + server.shutdownNow(); + } + + server = null; + channel = null; + servingStub = null; + } + + abstract ApplicationProperties.FeastProperties createFeastProperties(); + + AbstractModule registryConfig() { + return null; + } + + private static int getFreePort() { + ServerSocket serverSocket; + try { + serverSocket = new ServerSocket(0); + } catch (IOException e) { + throw new RuntimeException("Couldn't allocate port"); + } + + assertThat(serverSocket, is(notNullValue())); + assertThat(serverSocket.getLocalPort(), greaterThan(0)); + + return serverSocket.getLocalPort(); + } +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java new file mode 100644 index 0000000000..78871cd45c --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java @@ -0,0 +1,76 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.it; + +import static org.junit.jupiter.api.Assertions.*; + +import com.google.cloud.storage.*; +import com.google.cloud.storage.testing.RemoteStorageHelper; +import feast.proto.core.RegistryProto; +import feast.serving.config.ApplicationProperties; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +public class ServingRedisGSRegistryIT extends ServingBaseTests { + static Storage storage = + RemoteStorageHelper.create() + .getOptions() + .toBuilder() + .setProjectId(System.getProperty("GCP_PROJECT", "kf-feast")) + .build() + .getService(); + + static final String bucket = RemoteStorageHelper.generateBucketName(); + + static void putToStorage(BlobId blobId, RegistryProto.Registry registry) { + storage.create(BlobInfo.newBuilder(blobId).build(), registry.toByteArray()); + + assertArrayEquals(storage.get(blobId).getContent(), registry.toByteArray()); + } + + static BlobId blobId; + + @BeforeAll + static void setUp() { + storage.create(BucketInfo.of(bucket)); + blobId = BlobId.of(bucket, "registry.db"); + + putToStorage(blobId, registryProto); + } + + @AfterAll + static void tearDown() throws ExecutionException, InterruptedException { + RemoteStorageHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS); + } + + @Override + ApplicationProperties.FeastProperties createFeastProperties() { + final ApplicationProperties.FeastProperties feastProperties = + TestUtils.createBasicFeastProperties( + environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379)); + feastProperties.setRegistry(blobId.toGsUtilUri()); + + return feastProperties; + } + + @Override + void updateRegistryFile(RegistryProto.Registry registry) { + putToStorage(blobId, registry); + } +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisLocalRegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisLocalRegistryIT.java new file mode 100644 index 0000000000..c83d8dbbf1 --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisLocalRegistryIT.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.it; + +import feast.proto.core.RegistryProto; +import feast.serving.config.ApplicationProperties; + +public class ServingRedisLocalRegistryIT extends ServingBaseTests { + @Override + ApplicationProperties.FeastProperties createFeastProperties() { + return TestUtils.createBasicFeastProperties( + environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379)); + } + + @Override + void updateRegistryFile(RegistryProto.Registry registry) {} + + @Override + public void shouldRefreshRegistryAndServeNewFeatures() throws InterruptedException {} +} diff --git a/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java b/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java new file mode 100644 index 0000000000..d67fbf2621 --- /dev/null +++ b/java/serving/src/test/java/feast/serving/it/ServingRedisS3RegistryIT.java @@ -0,0 +1,91 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.it; + +import com.adobe.testing.s3mock.testcontainers.S3MockContainer; +import com.amazonaws.client.builder.AwsClientBuilder; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import feast.proto.core.RegistryProto; +import feast.serving.config.ApplicationProperties; +import java.io.ByteArrayInputStream; +import org.junit.jupiter.api.BeforeAll; +import org.testcontainers.junit.jupiter.Container; + +public class ServingRedisS3RegistryIT extends ServingBaseTests { + @Container static final S3MockContainer s3Mock = new S3MockContainer("2.2.3"); + + private static AmazonS3 createClient() { + return AmazonS3ClientBuilder.standard() + .withEndpointConfiguration( + new AwsClientBuilder.EndpointConfiguration( + String.format("http://localhost:%d", s3Mock.getHttpServerPort()), "us-east-1")) + .enablePathStyleAccess() + .build(); + } + + private static void putToStorage(RegistryProto.Registry proto) { + byte[] bytes = proto.toByteArray(); + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(bytes.length); + metadata.setContentType("application/protobuf"); + + AmazonS3 s3Client = createClient(); + s3Client.putObject("test-bucket", "registry.db", new ByteArrayInputStream(bytes), metadata); + } + + @BeforeAll + static void setUp() { + AmazonS3 s3Client = createClient(); + s3Client.createBucket("test-bucket"); + + putToStorage(registryProto); + } + + @Override + ApplicationProperties.FeastProperties createFeastProperties() { + final ApplicationProperties.FeastProperties feastProperties = + TestUtils.createBasicFeastProperties( + environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379)); + feastProperties.setRegistry("s3://test-bucket/registry.db"); + + return feastProperties; + } + + @Override + void 
updateRegistryFile(RegistryProto.Registry registry) {
+    putToStorage(registry);
+  }
+
+  @Override
+  AbstractModule registryConfig() {
+    return new AbstractModule() {
+      @Provides
+      public AmazonS3 awsStorage() {
+        return AmazonS3ClientBuilder.standard()
+            .withEndpointConfiguration(
+                new AwsClientBuilder.EndpointConfiguration(
+                    String.format("http://localhost:%d", s3Mock.getHttpServerPort()),
+                    "us-east-1"))
+            .enablePathStyleAccess()
+            .build();
+      }
+    };
+  }
+}
diff --git a/java/serving/src/test/java/feast/serving/it/TestUtils.java b/java/serving/src/test/java/feast/serving/it/TestUtils.java
new file mode 100644
index 0000000000..9bca14db4e
--- /dev/null
+++ b/java/serving/src/test/java/feast/serving/it/TestUtils.java
@@ -0,0 +1,91 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.serving.it;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest;
+import feast.proto.serving.ServingServiceGrpc;
+import feast.proto.types.ValueProto;
+import feast.serving.config.ApplicationProperties;
+import io.grpc.Channel;
+import io.grpc.ManagedChannelBuilder;
+import java.util.*;
+
+public class TestUtils {
+
+  public static ServingServiceGrpc.ServingServiceBlockingStub getServingServiceStub(
+      boolean isSecure, int feastServingPort, Map<String, String> options) {
+    Channel secureChannel =
+        ManagedChannelBuilder.forAddress("localhost", feastServingPort).usePlaintext().build();
+    return ServingServiceGrpc.newBlockingStub(secureChannel);
+  }
+
+  public static GetOnlineFeaturesRequest createOnlineFeatureRequest(
+      List<String> featureReferences, Map<String, ValueProto.RepeatedValue> entityRows) {
+    return createOnlineFeatureRequest(featureReferences, entityRows, new HashMap<>());
+  }
+
+  public static GetOnlineFeaturesRequest createOnlineFeatureRequest(
+      List<String> featureReferences,
+      Map<String, ValueProto.RepeatedValue> entityRows,
+      Map<String, ValueProto.RepeatedValue> requestContext) {
+    return GetOnlineFeaturesRequest.newBuilder()
+        .setFeatures(ServingAPIProto.FeatureList.newBuilder().addAllVal(featureReferences))
+        .putAllEntities(entityRows)
+        .putAllRequestContext(requestContext)
+        .build();
+  }
+
+  public static GetOnlineFeaturesRequest createOnlineFeatureRequest(
+      String featureService, Map<String, ValueProto.RepeatedValue> entityRows) {
+    return createOnlineFeatureRequest(featureService, entityRows, new HashMap<>());
+  }
+
+  public static GetOnlineFeaturesRequest createOnlineFeatureRequest(
+      String featureService,
+      Map<String, ValueProto.RepeatedValue> entityRows,
+      Map<String, ValueProto.RepeatedValue> requestContext) {
+    return GetOnlineFeaturesRequest.newBuilder()
+        .setFeatureService(featureService)
+        .putAllEntities(entityRows)
+        .putAllRequestContext(requestContext)
+        .build();
+  }
+
+  public static ApplicationProperties.FeastProperties createBasicFeastProperties(
+      String redisHost, Integer redisPort) {
+    final ApplicationProperties.FeastProperties feastProperties =
+        new ApplicationProperties.FeastProperties();
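+    // The registry path below is the protobuf registry baked by the feast10 docker-compose
+    // app; the 1s refresh interval keeps registry reloads frequent enough for
+    // shouldRefreshRegistryAndServeNewFeatures to observe an update within its 5s await.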
+    feastProperties.setRegistry("src/test/resources/docker-compose/feast10/registry.db");
+    feastProperties.setRegistryRefreshInterval(1);
+
+    feastProperties.setActiveStore("online");
+    feastProperties.setProject("feast_project");
+
+    feastProperties.setStores(
+        ImmutableList.of(
+            new ApplicationProperties.Store(
+                "online",
+                "REDIS",
+                ImmutableMap.of(
+                    "host", redisHost, "port", redisPort.toString(), "password", "testpw"))));
+
+    return feastProperties;
+  }
+}
diff --git a/java/serving/src/test/java/feast/serving/it/TransformationServiceIT.java b/java/serving/src/test/java/feast/serving/it/TransformationServiceIT.java
new file mode 100644
index 0000000000..102d851528
--- /dev/null
+++ b/java/serving/src/test/java/feast/serving/it/TransformationServiceIT.java
@@ -0,0 +1,103 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2022 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.serving.it;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+import feast.serving.config.ApplicationProperties;
+import feast.serving.util.DataGenerator;
+import java.util.List;
+import java.util.Map;
+import org.junit.jupiter.api.Test;
+
+public class TransformationServiceIT extends ServingEnvironment {
+  @Override
+  ApplicationProperties.FeastProperties createFeastProperties() {
+    ApplicationProperties.FeastProperties feastProperties =
+        TestUtils.createBasicFeastProperties(
+            environment.getServiceHost("redis", 6379), environment.getServicePort("redis", 6379));
+    feastProperties.setTransformationServiceEndpoint(
+        String.format(
+            "%s:%d",
+            environment.getServiceHost("feast", 8080), environment.getServicePort("feast", 8080)));
+    return feastProperties;
+  }
+
+  private ServingAPIProto.GetOnlineFeaturesRequest buildOnlineRequest(
+      int driverId, boolean transformedFeaturesOnly) {
+    Map<String, ValueProto.RepeatedValue> entityRows =
+        ImmutableMap.of(
+            "driver_id",
+            ValueProto.RepeatedValue.newBuilder()
+                .addVal(DataGenerator.createInt64Value(driverId))
+                .build());
+
+    Map<String, ValueProto.RepeatedValue> requestContext =
+        ImmutableMap.of(
+            "val_to_add",
+            ValueProto.RepeatedValue.newBuilder().addVal(DataGenerator.createInt64Value(3)).build(),
+            "val_to_add_2",
+            ValueProto.RepeatedValue.newBuilder()
+                .addVal(DataGenerator.createInt64Value(5))
+                .build());
+
+    List<String> featureReferences =
+        Lists.newArrayList(
+            "transformed_conv_rate:conv_rate_plus_val1",
+            "transformed_conv_rate:conv_rate_plus_val2");
+
+    if (!transformedFeaturesOnly) {
+      featureReferences.add("driver_hourly_stats:conv_rate");
+    }
+
+    return TestUtils.createOnlineFeatureRequest(featureReferences, entityRows, requestContext);
+  }
+
+  @Test
+  public void shouldCalculateOnDemandFeatures() {
+    ServingAPIProto.GetOnlineFeaturesResponse featureResponse =
+        servingStub.getOnlineFeatures(buildOnlineRequest(1005, false));
+
+    for (int featureIdx : List.of(0, 1, 2)) {
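+      // One retrieved feature (conv_rate) plus two on-demand transformed features are
+      // expected back, all with PRESENT status for a known driver id.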
assertEquals( + List.of(ServingAPIProto.FieldStatus.PRESENT), + featureResponse.getResults(featureIdx).getStatusesList()); + } + + // conv_rate + assertEquals(0.5, featureResponse.getResults(0).getValues(0).getDoubleVal(), 0.0001); + // conv_rate + val_to_add (3.0) + assertEquals(3.5, featureResponse.getResults(1).getValues(0).getDoubleVal(), 0.0001); + // conv_rate + val_to_add_2 (5.0) + assertEquals(5.5, featureResponse.getResults(2).getValues(0).getDoubleVal(), 0.0001); + } + + @Test + public void shouldCorrectlyFetchDependantFeatures() { + ServingAPIProto.GetOnlineFeaturesResponse featureResponse = + servingStub.getOnlineFeatures(buildOnlineRequest(1005, true)); + + // conv_rate + val_to_add (3.0) + assertEquals(3.5, featureResponse.getResults(0).getValues(0).getDoubleVal(), 0.0001); + // conv_rate + val_to_add_2 (5.0) + assertEquals(5.5, featureResponse.getResults(1).getValues(0).getDoubleVal(), 0.0001); + } +} diff --git a/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java b/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java new file mode 100644 index 0000000000..64d2e20c9b --- /dev/null +++ b/java/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java @@ -0,0 +1,390 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package feast.serving.service;
+
+import static feast.serving.util.DataGenerator.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+import static org.mockito.MockitoAnnotations.initMocks;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.protobuf.Duration;
+import com.google.protobuf.Timestamp;
+import feast.proto.core.FeatureProto;
+import feast.proto.core.FeatureViewProto;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.serving.ServingAPIProto.FieldStatus;
+import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse;
+import feast.proto.types.ValueProto;
+import feast.serving.registry.Registry;
+import feast.serving.registry.RegistryRepository;
+import feast.storage.api.retriever.Feature;
+import feast.storage.api.retriever.ProtoFeature;
+import feast.storage.connectors.redis.retriever.OnlineRetriever;
+import io.opentracing.Tracer;
+import io.opentracing.Tracer.SpanBuilder;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentMatchers;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+
+public class OnlineServingServiceTest {
+
+  @Mock Registry registry;
+  @Mock Tracer tracer;
+  @Mock OnlineRetriever retrieverV2;
+  private String transformationServiceEndpoint;
+
+  private OnlineServingServiceV2 onlineServingServiceV2;
+
+  List<Feature> mockedFeatureRows;
+  List<FeatureProto.FeatureSpecV2> featureSpecs;
+
+  Timestamp now = Timestamp.newBuilder().setSeconds(System.currentTimeMillis() / 1000).build();
+
+  @Before
+  public void setUp() {
+    initMocks(this);
+
+    RegistryRepository registryRepo = new RegistryRepository(registry);
+
+    OnlineTransformationService onlineTransformationService =
+        new OnlineTransformationService(transformationServiceEndpoint, registryRepo);
+    onlineServingServiceV2 =
+        new OnlineServingServiceV2(
+            retrieverV2, tracer, registryRepo, onlineTransformationService, "feast_project");
+
+    mockedFeatureRows = new ArrayList<>();
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_1")
+                .build(),
+            now,
+            createStrValue("1")));
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_2")
+                .build(),
+            now,
+            createStrValue("2")));
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_1")
+                .build(),
+            now,
+            createStrValue("3")));
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_2")
+                .build(),
+            now,
+            createStrValue("4")));
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_3")
+                .build(),
+            now,
+            createStrValue("5")));
+    mockedFeatureRows.add(
+        new ProtoFeature(
+            ServingAPIProto.FeatureReferenceV2.newBuilder()
+                .setFeatureViewName("featureview_1")
+                .setFeatureName("feature_1")
+                .build(),
+            Timestamp.newBuilder().setSeconds(1).build(),
+            createStrValue("6")));
+
+    featureSpecs = new ArrayList<>();
+    featureSpecs.add(
+        FeatureProto.FeatureSpecV2.newBuilder()
+            .setName("feature_1")
+            .setValueType(ValueProto.ValueType.Enum.STRING)
+            .build());
+    featureSpecs.add(
+        FeatureProto.FeatureSpecV2.newBuilder()
+            .setName("feature_2")
+            .setValueType(ValueProto.ValueType.Enum.STRING)
+            .build());
+  }
+
+  @Test
+  public void shouldReturnResponseWithValuesAndMetadataIfKeysPresent() {
+    String projectName = "default";
+    ServingAPIProto.FeatureReferenceV2 featureReference1 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_1")
+            .build();
+    ServingAPIProto.FeatureReferenceV2 featureReference2 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_2")
+            .build();
+    List<ServingAPIProto.FeatureReferenceV2> featureReferences =
+        List.of(featureReference1, featureReference2);
+    ServingAPIProto.GetOnlineFeaturesRequest request = getOnlineFeaturesRequest(featureReferences);
+
+    List<List<Feature>> featureRows =
+        List.of(
+            List.of(mockedFeatureRows.get(0), mockedFeatureRows.get(1)),
+            List.of(mockedFeatureRows.get(2), mockedFeatureRows.get(3)));
+
+    when(retrieverV2.getOnlineFeatures(any(), any(), any())).thenReturn(featureRows);
+    when(registry.getFeatureViewSpec(any())).thenReturn(getFeatureViewSpec());
+    when(registry.getFeatureSpec(mockedFeatureRows.get(0).getFeatureReference()))
+        .thenReturn(featureSpecs.get(0));
+    when(registry.getFeatureSpec(mockedFeatureRows.get(1).getFeatureReference()))
+        .thenReturn(featureSpecs.get(1));
+    when(registry.getFeatureSpec(mockedFeatureRows.get(2).getFeatureReference()))
+        .thenReturn(featureSpecs.get(0));
+    when(registry.getFeatureSpec(mockedFeatureRows.get(3).getFeatureReference()))
+        .thenReturn(featureSpecs.get(1));
+
+    when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class));
+
+    GetOnlineFeaturesResponse expected =
+        GetOnlineFeaturesResponse.newBuilder()
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("1"))
+                    .addValues(createStrValue("3"))
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addEventTimestamps(now)
+                    .addEventTimestamps(now))
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("2"))
+                    .addValues(createStrValue("4"))
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addEventTimestamps(now)
+                    .addEventTimestamps(now))
+            .setMetadata(
+                ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder()
+                    .setFeatureNames(
+                        ServingAPIProto.FeatureList.newBuilder()
+                            .addVal("featureview_1:feature_1")
+                            .addVal("featureview_1:feature_2")))
+            .build();
+    ServingAPIProto.GetOnlineFeaturesResponse actual =
+        onlineServingServiceV2.getOnlineFeatures(request);
+    assertThat(actual, equalTo(expected));
+  }
+
+  @Test
+  public void shouldReturnResponseWithUnsetValuesAndMetadataIfKeysNotPresent() {
+    String projectName = "default";
+    ServingAPIProto.FeatureReferenceV2 featureReference1 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_1")
+            .build();
+    ServingAPIProto.FeatureReferenceV2 featureReference2 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_2")
+            .build();
+    List<ServingAPIProto.FeatureReferenceV2> featureReferences =
+        List.of(featureReference1, featureReference2);
+    ServingAPIProto.GetOnlineFeaturesRequest request = getOnlineFeaturesRequest(featureReferences);
+
+    List<Feature> entityKeyList1 = new ArrayList<>();
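+    // The null entry in featureRows below simulates a feature missing from the online store;
+    // the service is expected to surface it as NOT_FOUND with an empty value and a default
+    // (zero) event timestamp.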
+    List<Feature> entityKeyList2 = new ArrayList<>();
+    entityKeyList1.add(mockedFeatureRows.get(0));
+    entityKeyList1.add(mockedFeatureRows.get(1));
+    entityKeyList2.add(mockedFeatureRows.get(4));
+
+    List<List<Feature>> featureRows =
+        List.of(
+            List.of(mockedFeatureRows.get(0), mockedFeatureRows.get(1)),
+            Arrays.asList(null, mockedFeatureRows.get(4)));
+
+    when(retrieverV2.getOnlineFeatures(any(), any(), any())).thenReturn(featureRows);
+    when(registry.getFeatureViewSpec(any())).thenReturn(getFeatureViewSpec());
+    when(registry.getFeatureSpec(mockedFeatureRows.get(0).getFeatureReference()))
+        .thenReturn(featureSpecs.get(0));
+    when(registry.getFeatureSpec(mockedFeatureRows.get(1).getFeatureReference()))
+        .thenReturn(featureSpecs.get(1));
+
+    when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class));
+
+    GetOnlineFeaturesResponse expected =
+        GetOnlineFeaturesResponse.newBuilder()
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("1"))
+                    .addValues(createEmptyValue())
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addStatuses(FieldStatus.NOT_FOUND)
+                    .addEventTimestamps(now)
+                    .addEventTimestamps(Timestamp.newBuilder().build()))
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("2"))
+                    .addValues(createStrValue("5"))
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addEventTimestamps(now)
+                    .addEventTimestamps(now))
+            .setMetadata(
+                ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder()
+                    .setFeatureNames(
+                        ServingAPIProto.FeatureList.newBuilder()
+                            .addVal("featureview_1:feature_1")
+                            .addVal("featureview_1:feature_2")))
+            .build();
+    GetOnlineFeaturesResponse actual = onlineServingServiceV2.getOnlineFeatures(request);
+    assertThat(actual, equalTo(expected));
+  }
+
+  @Test
+  public void shouldReturnResponseWithValuesAndMetadataIfMaxAgeIsExceeded() {
+    String projectName = "default";
+    ServingAPIProto.FeatureReferenceV2 featureReference1 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_1")
+            .build();
+    ServingAPIProto.FeatureReferenceV2 featureReference2 =
+        ServingAPIProto.FeatureReferenceV2.newBuilder()
+            .setFeatureViewName("featureview_1")
+            .setFeatureName("feature_2")
+            .build();
+    List<ServingAPIProto.FeatureReferenceV2> featureReferences =
+        List.of(featureReference1, featureReference2);
+    ServingAPIProto.GetOnlineFeaturesRequest request = getOnlineFeaturesRequest(featureReferences);
+
+    List<List<Feature>> featureRows =
+        List.of(
+            List.of(mockedFeatureRows.get(5), mockedFeatureRows.get(1)),
+            List.of(mockedFeatureRows.get(5), mockedFeatureRows.get(1)));
+
+    when(retrieverV2.getOnlineFeatures(any(), any(), any())).thenReturn(featureRows);
+    when(registry.getFeatureViewSpec(any()))
+        .thenReturn(
+            FeatureViewProto.FeatureViewSpec.newBuilder()
+                .setName("featureview_1")
+                .addEntities("entity1")
+                .addEntities("entity2")
+                .addFeatures(
+                    FeatureProto.FeatureSpecV2.newBuilder()
+                        .setName("feature_1")
+                        .setValueType(ValueProto.ValueType.Enum.STRING)
+                        .build())
+                .addFeatures(
+                    FeatureProto.FeatureSpecV2.newBuilder()
+                        .setName("feature_2")
+                        .setValueType(ValueProto.ValueType.Enum.STRING)
+                        .build())
+                .setTtl(Duration.newBuilder().setSeconds(3600))
+                .build());
+    when(registry.getFeatureSpec(mockedFeatureRows.get(1).getFeatureReference()))
+        .thenReturn(featureSpecs.get(1));
+    when(registry.getFeatureSpec(mockedFeatureRows.get(5).getFeatureReference()))
+        .thenReturn(featureSpecs.get(0));
+
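+    // mockedFeatureRows.get(5) carries an event timestamp of epoch second 1, far older than
+    // the 3600s TTL stubbed above, so feature_1 is expected to be flagged OUTSIDE_MAX_AGE
+    // while its value is still returned.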
+    when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class));
+
+    GetOnlineFeaturesResponse expected =
+        GetOnlineFeaturesResponse.newBuilder()
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("6"))
+                    .addValues(createStrValue("6"))
+                    .addStatuses(FieldStatus.OUTSIDE_MAX_AGE)
+                    .addStatuses(FieldStatus.OUTSIDE_MAX_AGE)
+                    .addEventTimestamps(Timestamp.newBuilder().setSeconds(1).build())
+                    .addEventTimestamps(Timestamp.newBuilder().setSeconds(1).build()))
+            .addResults(
+                GetOnlineFeaturesResponse.FeatureVector.newBuilder()
+                    .addValues(createStrValue("2"))
+                    .addValues(createStrValue("2"))
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addStatuses(FieldStatus.PRESENT)
+                    .addEventTimestamps(now)
+                    .addEventTimestamps(now))
+            .setMetadata(
+                ServingAPIProto.GetOnlineFeaturesResponseMetadata.newBuilder()
+                    .setFeatureNames(
+                        ServingAPIProto.FeatureList.newBuilder()
+                            .addVal("featureview_1:feature_1")
+                            .addVal("featureview_1:feature_2")))
+            .build();
+    GetOnlineFeaturesResponse actual = onlineServingServiceV2.getOnlineFeatures(request);
+    assertThat(actual, equalTo(expected));
+  }
+
+  private FeatureViewProto.FeatureViewSpec getFeatureViewSpec() {
+    return FeatureViewProto.FeatureViewSpec.newBuilder()
+        .setName("featureview_1")
+        .addEntities("entity1")
+        .addEntities("entity2")
+        .addFeatures(
+            FeatureProto.FeatureSpecV2.newBuilder()
+                .setName("feature_1")
+                .setValueType(ValueProto.ValueType.Enum.STRING)
+                .build())
+        .addFeatures(
+            FeatureProto.FeatureSpecV2.newBuilder()
+                .setName("feature_2")
+                .setValueType(ValueProto.ValueType.Enum.STRING)
+                .build())
+        .setTtl(Duration.newBuilder().setSeconds(120))
+        .build();
+  }
+
+  private ServingAPIProto.GetOnlineFeaturesRequest getOnlineFeaturesRequest(
+      List<ServingAPIProto.FeatureReferenceV2> featureReferences) {
+    return ServingAPIProto.GetOnlineFeaturesRequest.newBuilder()
+        .setFeatures(
+            ServingAPIProto.FeatureList.newBuilder()
+                .addAllVal(
+                    featureReferences.stream()
+                        .map(feast.common.models.Feature::getFeatureReference)
+                        .collect(Collectors.toList()))
+                .build())
+        .putAllEntities(
+            ImmutableMap.of(
+                "entity1",
+                ValueProto.RepeatedValue.newBuilder()
+                    .addAllVal(List.of(createInt64Value(1), createInt64Value(2)))
+                    .build(),
+                "entity2",
+                ValueProto.RepeatedValue.newBuilder()
+                    .addAllVal(List.of(createStrValue("a"), createStrValue("b")))
+                    .build()))
+        .build();
+  }
+}
diff --git a/java/serving/src/test/java/feast/serving/util/DataGenerator.java b/java/serving/src/test/java/feast/serving/util/DataGenerator.java
new file mode 100644
index 0000000000..7a310828d2
--- /dev/null
+++ b/java/serving/src/test/java/feast/serving/util/DataGenerator.java
@@ -0,0 +1,313 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.serving.util;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.protobuf.Duration;
+import com.google.protobuf.Timestamp;
+import feast.proto.core.DataFormatProto.FileFormat;
+import feast.proto.core.DataFormatProto.FileFormat.ParquetFormat;
+import feast.proto.core.DataFormatProto.StreamFormat;
+import feast.proto.core.DataFormatProto.StreamFormat.AvroFormat;
+import feast.proto.core.DataFormatProto.StreamFormat.ProtoFormat;
+import feast.proto.core.DataSourceProto.DataSource;
+import feast.proto.core.DataSourceProto.DataSource.FileOptions;
+import feast.proto.core.DataSourceProto.DataSource.KafkaOptions;
+import feast.proto.core.DataSourceProto.DataSource.KinesisOptions;
+import feast.proto.core.EntityProto;
+import feast.proto.core.FeatureProto;
+import feast.proto.core.FeatureProto.FeatureSpecV2;
+import feast.proto.core.FeatureTableProto.FeatureTableSpec;
+import feast.proto.core.StoreProto;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.commons.lang3.tuple.Triple;
+
+public class DataGenerator {
+  // projectName, featureName, exclude
+  static Triple<String, String, Boolean> defaultSubscription = Triple.of("*", "*", false);
+
+  static StoreProto.Store defaultStore =
+      createStore(
+          "test-store", StoreProto.Store.StoreType.REDIS, ImmutableList.of(defaultSubscription));
+
+  public static Triple<String, String, Boolean> getDefaultSubscription() {
+    return defaultSubscription;
+  }
+
+  public static String valueToString(ValueProto.Value v) {
+    String stringRepr;
+    switch (v.getValCase()) {
+      case STRING_VAL:
+        stringRepr = v.getStringVal();
+        break;
+      case INT64_VAL:
+        stringRepr = String.valueOf(v.getInt64Val());
+        break;
+      case INT32_VAL:
+        stringRepr = String.valueOf(v.getInt32Val());
+        break;
+      case BYTES_VAL:
+        stringRepr = v.getBytesVal().toString();
+        break;
+      default:
+        throw new RuntimeException("Type is not supported to be entity");
+    }
+
+    return stringRepr;
+  }
+
+  public static StoreProto.Store getDefaultStore() {
+    return defaultStore;
+  }
+
+  public static StoreProto.Store createStore(
+      String name,
+      StoreProto.Store.StoreType type,
+      List<Triple<String, String, Boolean>> subscriptions) {
+    StoreProto.Store.Builder builder =
+        StoreProto.Store.newBuilder()
+            .addAllSubscriptions(
+                subscriptions.stream()
+                    .map(
+                        s ->
+                            StoreProto.Store.Subscription.newBuilder()
+                                .setProject(s.getLeft())
+                                .setName(s.getMiddle())
+                                .setExclude(s.getRight())
+                                .build())
+                    .collect(Collectors.toList()))
+            .setName(name)
+            .setType(type);
+
+    switch (type) {
+      case REDIS:
+        StoreProto.Store.RedisConfig redisConfig =
+            StoreProto.Store.RedisConfig.newBuilder().build();
+        return builder.setRedisConfig(redisConfig).build();
+      case REDIS_CLUSTER:
+        StoreProto.Store.RedisClusterConfig redisClusterConfig =
+            StoreProto.Store.RedisClusterConfig.newBuilder().build();
+        return builder.setRedisClusterConfig(redisClusterConfig).build();
+      default:
+        throw new RuntimeException("Unrecognized Store type");
+    }
+  }
+
+  public static EntityProto.EntitySpecV2 createEntitySpecV2(
+      String name,
+      String description,
+      ValueProto.ValueType.Enum valueType,
+      Map<String, String> tags) {
+    return EntityProto.EntitySpecV2.newBuilder()
+        .setName(name)
+        .setDescription(description)
+        .setValueType(valueType)
+        .putAllTags(tags)
+        .build();
+  }
+
+  public static FeatureProto.FeatureSpecV2 createFeatureSpecV2(
+      String name, ValueProto.ValueType.Enum valueType, Map<String, String> tags) {
+    return
+        FeatureProto.FeatureSpecV2.newBuilder()
+            .setName(name)
+            .setValueType(valueType)
+            .putAllTags(tags)
+            .build();
+  }
+
+  // Create a Feature Table spec without DataSources configured.
+  public static FeatureTableSpec createFeatureTableSpec(
+      String name,
+      List<String> entities,
+      Map<String, ValueProto.ValueType.Enum> features,
+      int maxAgeSecs,
+      Map<String, String> tags) {
+
+    return FeatureTableSpec.newBuilder()
+        .setName(name)
+        .addAllEntities(entities)
+        .addAllFeatures(
+            features.entrySet().stream()
+                .map(
+                    entry ->
+                        FeatureSpecV2.newBuilder()
+                            .setName(entry.getKey())
+                            .setValueType(entry.getValue())
+                            .putAllTags(tags)
+                            .build())
+                .collect(Collectors.toList()))
+        .setMaxAge(Duration.newBuilder().setSeconds(3600).build())
+        .setBatchSource(
+            DataSource.newBuilder()
+                .setTimestampField("ts")
+                .setType(DataSource.SourceType.BATCH_FILE)
+                .setFileOptions(
+                    FileOptions.newBuilder()
+                        .setFileFormat(
+                            FileFormat.newBuilder()
+                                .setParquetFormat(ParquetFormat.newBuilder().build())
+                                .build())
+                        .setUri("/dev/null")
+                        .build())
+                .build())
+        .putAllLabels(tags)
+        .build();
+  }
+
+  public static FeatureTableSpec createFeatureTableSpec(
+      String name,
+      List<String> entities,
+      ImmutableMap<String, ValueProto.ValueType.Enum> features,
+      int maxAgeSecs,
+      Map<String, String> tags) {
+
+    return FeatureTableSpec.newBuilder()
+        .setName(name)
+        .addAllEntities(entities)
+        .addAllFeatures(
+            features.entrySet().stream()
+                .map(
+                    entry ->
+                        FeatureSpecV2.newBuilder()
+                            .setName(entry.getKey())
+                            .setValueType(entry.getValue())
+                            .putAllTags(tags)
+                            .build())
+                .collect(Collectors.toList()))
+        .setMaxAge(Duration.newBuilder().setSeconds(maxAgeSecs).build())
+        .putAllLabels(tags)
+        .build();
+  }
+
+  public static DataSource createFileDataSourceSpec(
+      String fileURL, String timestampColumn, String datePartitionColumn) {
+    return DataSource.newBuilder()
+        .setType(DataSource.SourceType.BATCH_FILE)
+        .setFileOptions(
+            FileOptions.newBuilder().setFileFormat(createParquetFormat()).setUri(fileURL).build())
+        .setTimestampField(timestampColumn)
+        .setDatePartitionColumn(datePartitionColumn)
+        .build();
+  }
+
+  public static DataSource createBigQueryDataSourceSpec(
+      String bigQueryTable, String timestampColumn, String datePartitionColumn) {
+    return DataSource.newBuilder()
+        .setType(DataSource.SourceType.BATCH_BIGQUERY)
+        .setBigqueryOptions(DataSource.BigQueryOptions.newBuilder().setTable(bigQueryTable).build())
+        .setTimestampField(timestampColumn)
+        .setDatePartitionColumn(datePartitionColumn)
+        .build();
+  }
+
+  public static DataSource createKafkaDataSourceSpec(
+      String servers, String topic, String classPath, String timestampColumn) {
+    return DataSource.newBuilder()
+        .setType(DataSource.SourceType.STREAM_KAFKA)
+        .setKafkaOptions(
+            KafkaOptions.newBuilder()
+                .setTopic(topic)
+                .setBootstrapServers(servers)
+                .setMessageFormat(createProtoFormat("class.path"))
+                .build())
+        .setTimestampField(timestampColumn)
+        .build();
+  }
+
+  public static ValueProto.Value createEmptyValue() {
+    return ValueProto.Value.newBuilder().build();
+  }
+
+  public static ValueProto.Value createStrValue(String val) {
+    return ValueProto.Value.newBuilder().setStringVal(val).build();
+  }
+
+  public static ValueProto.Value createDoubleValue(double value) {
+    return ValueProto.Value.newBuilder().setDoubleVal(value).build();
+  }
+
+  public static ValueProto.Value createInt32Value(int value) {
+    return ValueProto.Value.newBuilder().setInt32Val(value).build();
+  }
+
+  public static ValueProto.Value createInt64Value(long value) {
+    return ValueProto.Value.newBuilder().setInt64Val(value).build();
+  }
+
+  public static
+      ServingAPIProto.FeatureReferenceV2 createFeatureReference(
+          String featureTableName, String featureName) {
+    return ServingAPIProto.FeatureReferenceV2.newBuilder()
+        .setFeatureViewName(featureTableName)
+        .setFeatureName(featureName)
+        .build();
+  }
+
+  public static ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow createEntityRow(
+      String entityName, ValueProto.Value entityValue, long seconds) {
+    return ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow.newBuilder()
+        .setTimestamp(Timestamp.newBuilder().setSeconds(seconds))
+        .putFields(entityName, entityValue)
+        .build();
+  }
+
+  public static ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow createCompoundEntityRow(
+      ImmutableMap<String, ValueProto.Value> entityNameValues, long seconds) {
+    ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow.Builder entityRow =
+        ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow.newBuilder()
+            .setTimestamp(Timestamp.newBuilder().setSeconds(seconds));
+
+    entityNameValues.entrySet().stream()
+        .forEach(entry -> entityRow.putFields(entry.getKey(), entry.getValue()));
+
+    return entityRow.build();
+  }
+
+  public static DataSource createKinesisDataSourceSpec(
+      String region, String streamName, String classPath, String timestampColumn) {
+    return DataSource.newBuilder()
+        .setType(DataSource.SourceType.STREAM_KINESIS)
+        .setKinesisOptions(
+            KinesisOptions.newBuilder()
+                .setRegion("ap-nowhere1")
+                .setStreamName("stream")
+                .setRecordFormat(createProtoFormat(classPath))
+                .build())
+        .setTimestampField(timestampColumn)
+        .build();
+  }
+
+  public static FileFormat createParquetFormat() {
+    return FileFormat.newBuilder().setParquetFormat(ParquetFormat.getDefaultInstance()).build();
+  }
+
+  public static StreamFormat createAvroFormat(String schemaJSON) {
+    return StreamFormat.newBuilder()
+        .setAvroFormat(AvroFormat.newBuilder().setSchemaJson(schemaJSON).build())
+        .build();
+  }
+
+  public static StreamFormat createProtoFormat(String classPath) {
+    return StreamFormat.newBuilder()
+        .setProtoFormat(ProtoFormat.newBuilder().setClassPath(classPath).build())
+        .build();
+  }
+}
diff --git a/java/serving/src/test/java/feast/serving/util/RequestHelperTest.java b/java/serving/src/test/java/feast/serving/util/RequestHelperTest.java
new file mode 100644
index 0000000000..fc19dbb02e
--- /dev/null
+++ b/java/serving/src/test/java/feast/serving/util/RequestHelperTest.java
@@ -0,0 +1,55 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package feast.serving.util; + +import feast.proto.serving.ServingAPIProto; +import org.junit.Test; + +public class RequestHelperTest { + + @Test(expected = IllegalArgumentException.class) + public void shouldErrorIfEntityRowEmpty() { + + ServingAPIProto.GetOnlineFeaturesRequest getOnlineFeaturesRequest = + ServingAPIProto.GetOnlineFeaturesRequest.newBuilder() + .setFeatures( + ServingAPIProto.FeatureList.newBuilder().addVal("view:featurename").build()) + .build(); + + RequestHelper.validateOnlineRequest(getOnlineFeaturesRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void shouldErrorIfFeatureReferenceTableEmpty() { + ServingAPIProto.GetOnlineFeaturesRequest getOnlineFeaturesRequest = + ServingAPIProto.GetOnlineFeaturesRequest.newBuilder() + .setFeatures(ServingAPIProto.FeatureList.newBuilder().addVal("featurename").build()) + .build(); + + RequestHelper.validateOnlineRequest(getOnlineFeaturesRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void shouldErrorIfFeatureReferenceNameEmpty() { + ServingAPIProto.GetOnlineFeaturesRequest getOnlineFeaturesRequest = + ServingAPIProto.GetOnlineFeaturesRequest.newBuilder() + .setFeatures(ServingAPIProto.FeatureList.newBuilder().addVal("view").build()) + .build(); + + RequestHelper.validateOnlineRequest(getOnlineFeaturesRequest); + } +} diff --git a/java/serving/src/test/resources/application-it.properties b/java/serving/src/test/resources/application-it.properties new file mode 100644 index 0000000000..a937f39bd5 --- /dev/null +++ b/java/serving/src/test/resources/application-it.properties @@ -0,0 +1,16 @@ +# +# Copyright 2018 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +spring.main.allow-bean-definition-overriding=true \ No newline at end of file diff --git a/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml b/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml new file mode 100644 index 0000000000..1dee243cb8 --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml @@ -0,0 +1,15 @@ +version: '3' + +services: + redis: + image: redis:6.2 + command: redis-server --requirepass testpw + ports: + - "6379:6379" + feast: + build: feast10 + ports: + - "8080:8080" + links: + - redis + diff --git a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile new file mode 100644 index 0000000000..df14bb592b --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.7 + +WORKDIR /usr/src/ + +COPY requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt + +RUN git clone https://github.com/feast-dev/feast.git /root/feast +RUN cd /root/feast/sdk/python && pip install -e '.[redis]' + +WORKDIR /app +COPY . . 
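+
+# The image clones the Feast repo and installs the Python SDK with the Redis extra so the
+# container can materialize the test data into Redis (materialize.py) and then serve
+# on-demand transformations on port 8080 (see CMD below).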
+EXPOSE 8080 + +CMD ["/bin/sh", "-c", "python materialize.py && feast serve_transformations --port 8080"] diff --git a/java/serving/src/test/resources/docker-compose/feast10/definitions.py b/java/serving/src/test/resources/docker-compose/feast10/definitions.py new file mode 100644 index 0000000000..374408a20b --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/definitions.py @@ -0,0 +1,89 @@ +import pandas as pd +from feast.data_source import RequestSource +from feast.entity import Entity +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.field import Field +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 +from feast.value_type import ValueType +from google.protobuf.duration_pb2 import Duration +from feast import FileSource + +file_path = "driver_stats.parquet" +driver_hourly_stats = FileSource( + path=file_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Define an entity for the driver. You can think of entity as a primary key used to +# fetch features. +driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) + +# Our parquet files contain sample data that includes a driver_id column, timestamps and +# three feature column. Here we define a Feature View that will allow us to serve this +# data to our model online. +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=Duration(seconds=86400 * 7), + schema=[ + Field(name="conv_rate", dtype=Float64), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + batch_source=driver_hourly_stats, + tags={}, +) + + +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + Field(name="avg_daily_trips", dtype=Int64), + ], +) + + +@on_demand_feature_view( + sources={ + "driver_hourly_stats": driver_hourly_stats_view, + "vals_to_add": input_request, + }, + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = features_df["conv_rate"] + features_df["val_to_add"] + df["conv_rate_plus_val2"] = features_df["conv_rate"] + features_df["val_to_add_2"] + return df + + +generated_data_source = FileSource( + path="benchmark_data.parquet", timestamp_field="event_timestamp", +) + +entity = Entity(name="entity", value_type=ValueType.STRING,) + +benchmark_feature_views = [ + FeatureView( + name=f"feature_view_{i}", + entities=["entity"], + ttl=Duration(seconds=86400), + schema=[Field(name=f"feature_{10 * i + j}", dtype=Int64) for j in range(10)], + online=True, + batch_source=generated_data_source, + ) + for i in range(25) +] + +benchmark_feature_service = FeatureService( + name=f"benchmark_feature_service", features=benchmark_feature_views, +) diff --git a/java/serving/src/test/resources/docker-compose/feast10/feature_store.yaml b/java/serving/src/test/resources/docker-compose/feast10/feature_store.yaml new file mode 100644 index 0000000000..2e6625c025 --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/feature_store.yaml @@ -0,0 +1,10 @@ +project: feast_project +registry: registry.db +provider: local +online_store: + type: redis + connection_string: 
"redis:6379,password=testpw" +offline_store: {} +flags: + alpha_features: true + on_demand_transforms: true diff --git a/java/serving/src/test/resources/docker-compose/feast10/materialize.py b/java/serving/src/test/resources/docker-compose/feast10/materialize.py new file mode 100644 index 0000000000..404fec27e1 --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/materialize.py @@ -0,0 +1,75 @@ +from datetime import datetime, timedelta + +import numpy as np +import pandas as pd +from definitions import ( + benchmark_feature_service, + benchmark_feature_views, + driver, + driver_hourly_stats_view, + entity, + transformed_conv_rate, +) + +from feast import FeatureStore + +print("Running materialize.py") + +# Fill our temporary data source +start = datetime.now() - timedelta(days=10) + +df = pd.DataFrame() +df["driver_id"] = np.arange(1000, 1010) +df["created"] = datetime.now() +df["conv_rate"] = np.arange(0, 1, 0.1) +df["acc_rate"] = np.arange(0.5, 1, 0.05) +df["avg_daily_trips"] = np.arange(0, 1000, 100) + +# some of rows are beyond 7 days to test OUTSIDE_MAX_AGE status +df["event_timestamp"] = start + pd.Series(np.arange(0, 10)).map( + lambda days: timedelta(days=days) +) + +# Store data in parquet files. Parquet is convenient for local development mode. For +# production, you can use your favorite DWH, such as BigQuery. See Feast documentation +# for more info. +df.to_parquet("driver_stats.parquet") + + +# For Benchmarks +# Please read more in Feast RFC-031 +# (link https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit) +# about this benchmark setup +def generate_data(num_rows: int, num_features: int, destination: str) -> pd.DataFrame: + features = [f"feature_{i}" for i in range(num_features)] + columns = ["entity", "event_timestamp"] + features + df = pd.DataFrame(0, index=np.arange(num_rows), columns=columns) + df["event_timestamp"] = datetime.utcnow() + for column in features: + df[column] = np.random.randint(1, num_rows, num_rows) + + df["entity"] = "key-" + pd.Series(np.arange(1, num_rows + 1)).astype( + pd.StringDtype() + ) + + df.to_parquet(destination) + + +generate_data(10 ** 3, 250, "benchmark_data.parquet") + + +fs = FeatureStore(".") +fs.apply( + [ + driver_hourly_stats_view, + transformed_conv_rate, + driver, + entity, + benchmark_feature_service, + *benchmark_feature_views, + ] +) + +now = datetime.now() +fs.materialize(start, now) +print("Materialization finished") diff --git a/java/serving/src/test/resources/docker-compose/feast10/registry.db b/java/serving/src/test/resources/docker-compose/feast10/registry.db new file mode 100644 index 0000000000..746934e3d0 Binary files /dev/null and b/java/serving/src/test/resources/docker-compose/feast10/registry.db differ diff --git a/java/serving/src/test/resources/docker-compose/feast10/requirements.txt b/java/serving/src/test/resources/docker-compose/feast10/requirements.txt new file mode 100644 index 0000000000..94e4771de2 --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/requirements.txt @@ -0,0 +1,6 @@ +# for source generation +pyarrow==6.0.0 + +# temp fixes +proto-plus +Jinja2>=2.0.0 \ No newline at end of file diff --git a/java/serving/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/java/serving/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker new file mode 100644 index 0000000000..ca6ee9cea8 --- /dev/null +++ b/java/serving/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker @@ -0,0 +1 @@ 
+mock-maker-inline \ No newline at end of file diff --git a/java/storage/api/pom.xml b/java/storage/api/pom.xml new file mode 100644 index 0000000000..90f656e281 --- /dev/null +++ b/java/storage/api/pom.xml @@ -0,0 +1,72 @@ + + + + dev.feast + feast-parent + ${revision} + ../../pom.xml + + + 4.0.0 + feast-storage-api + + Feast Storage API + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + + javax.annotation + + + + + + + + + + dev.feast + feast-datatypes + ${project.version} + + + + com.google.auto.value + auto-value-annotations + 1.6.6 + + + + com.google.auto.value + auto-value + 1.6.6 + provided + + + + org.apache.commons + commons-lang3 + 3.9 + + + + org.apache.avro + avro + 1.10.2 + + + + junit + junit + 4.13.2 + test + + + + diff --git a/java/storage/api/src/main/java/feast/storage/api/retriever/AvroFeature.java b/java/storage/api/src/main/java/feast/storage/api/retriever/AvroFeature.java new file mode 100644 index 0000000000..96f19ccdbe --- /dev/null +++ b/java/storage/api/src/main/java/feast/storage/api/retriever/AvroFeature.java @@ -0,0 +1,171 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.api.retriever; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Timestamp; +import feast.proto.serving.ServingAPIProto; +import feast.proto.types.ValueProto; +import java.nio.ByteBuffer; +import java.util.stream.Collectors; +import org.apache.avro.generic.GenericData; +import org.apache.avro.util.Utf8; + +public class AvroFeature implements Feature { + private final ServingAPIProto.FeatureReferenceV2 featureReference; + + private final Timestamp eventTimestamp; + + private final Object featureValue; + + public AvroFeature( + ServingAPIProto.FeatureReferenceV2 featureReference, + Timestamp eventTimestamp, + Object featureValue) { + this.featureReference = featureReference; + this.eventTimestamp = eventTimestamp; + this.featureValue = featureValue; + } + + /** + * Casts feature value of Object type based on Feast valueType. Empty object i.e new Object() is + * interpreted as VAL_NOT_SET Feast valueType. 
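+   * For example, an Avro {@code Utf8} is converted to a STRING value and a {@code
+   * GenericData.Array} to the corresponding list value; a {@code ClassCastException} (e.g. after
+   * a feature type change) results in an empty Value being returned.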
+   *
+   * @param valueType Feast valueType of feature as specified in FeatureSpec
+   * @return ValueProto.Value representation of feature
+   */
+  @Override
+  public ValueProto.Value getFeatureValue(ValueProto.ValueType.Enum valueType) {
+    ValueProto.Value finalValue;
+
+    try {
+      switch (valueType) {
+        case STRING:
+          finalValue =
+              ValueProto.Value.newBuilder().setStringVal(((Utf8) featureValue).toString()).build();
+          break;
+        case INT32:
+          finalValue = ValueProto.Value.newBuilder().setInt32Val((Integer) featureValue).build();
+          break;
+        case INT64:
+          finalValue = ValueProto.Value.newBuilder().setInt64Val((Long) featureValue).build();
+          break;
+        case DOUBLE:
+          finalValue = ValueProto.Value.newBuilder().setDoubleVal((Double) featureValue).build();
+          break;
+        case FLOAT:
+          finalValue = ValueProto.Value.newBuilder().setFloatVal((Float) featureValue).build();
+          break;
+        case BYTES:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setBytesVal(ByteString.copyFrom(((ByteBuffer) featureValue).array()))
+                  .build();
+          break;
+        case BOOL:
+          finalValue = ValueProto.Value.newBuilder().setBoolVal((Boolean) featureValue).build();
+          break;
+        case STRING_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setStringListVal(
+                      ValueProto.StringList.newBuilder()
+                          .addAllVal(
+                              ((GenericData.Array<Utf8>) featureValue)
+                                  .stream().map(Utf8::toString).collect(Collectors.toList()))
+                          .build())
+                  .build();
+          break;
+        case INT64_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setInt64ListVal(
+                      ValueProto.Int64List.newBuilder()
+                          .addAllVal(((GenericData.Array<Long>) featureValue))
+                          .build())
+                  .build();
+          break;
+        case INT32_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setInt32ListVal(
+                      ValueProto.Int32List.newBuilder()
+                          .addAllVal(((GenericData.Array<Integer>) featureValue))
+                          .build())
+                  .build();
+          break;
+        case FLOAT_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setFloatListVal(
+                      ValueProto.FloatList.newBuilder()
+                          .addAllVal(((GenericData.Array<Float>) featureValue))
+                          .build())
+                  .build();
+          break;
+        case DOUBLE_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setDoubleListVal(
+                      ValueProto.DoubleList.newBuilder()
+                          .addAllVal(((GenericData.Array<Double>) featureValue))
+                          .build())
+                  .build();
+          break;
+        case BOOL_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setBoolListVal(
+                      ValueProto.BoolList.newBuilder()
+                          .addAllVal(((GenericData.Array<Boolean>) featureValue))
+                          .build())
+                  .build();
+          break;
+        case BYTES_LIST:
+          finalValue =
+              ValueProto.Value.newBuilder()
+                  .setBytesListVal(
+                      ValueProto.BytesList.newBuilder()
+                          .addAllVal(
+                              ((GenericData.Array<ByteBuffer>) featureValue)
+                                  .stream()
+                                  .map(byteBuffer -> ByteString.copyFrom(byteBuffer.array()))
+                                  .collect(Collectors.toList()))
+                          .build())
+                  .build();
+          break;
+        default:
+          throw new RuntimeException("FeatureType is not supported");
+      }
+    } catch (ClassCastException e) {
+      // Feature type has changed
+      finalValue = ValueProto.Value.newBuilder().build();
+    }
+
+    return finalValue;
+  }
+
+  @Override
+  public ServingAPIProto.FeatureReferenceV2 getFeatureReference() {
+    return this.featureReference;
+  }
+
+  @Override
+  public Timestamp getEventTimestamp() {
+    return this.eventTimestamp;
+  }
+}
diff --git a/java/storage/api/src/main/java/feast/storage/api/retriever/Feature.java b/java/storage/api/src/main/java/feast/storage/api/retriever/Feature.java
new file mode 100644
index 0000000000..92ae1f31fb
--- /dev/null
+++ b/java/storage/api/src/main/java/feast/storage/api/retriever/Feature.java
@@ -0,0 +1,52 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright
2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.api.retriever;
+
+import com.google.protobuf.Timestamp;
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+import feast.proto.types.ValueProto;
+import feast.proto.types.ValueProto.Value;
+import java.util.HashMap;
+
+public interface Feature {
+
+  HashMap<ValueProto.ValueType.Enum, ValueProto.Value.ValCase> TYPE_TO_VAL_CASE =
+      new HashMap<ValueProto.ValueType.Enum, ValueProto.Value.ValCase>() {
+        {
+          put(ValueProto.ValueType.Enum.BYTES, ValueProto.Value.ValCase.BYTES_VAL);
+          put(ValueProto.ValueType.Enum.STRING, ValueProto.Value.ValCase.STRING_VAL);
+          put(ValueProto.ValueType.Enum.INT32, ValueProto.Value.ValCase.INT32_VAL);
+          put(ValueProto.ValueType.Enum.INT64, ValueProto.Value.ValCase.INT64_VAL);
+          put(ValueProto.ValueType.Enum.DOUBLE, ValueProto.Value.ValCase.DOUBLE_VAL);
+          put(ValueProto.ValueType.Enum.FLOAT, ValueProto.Value.ValCase.FLOAT_VAL);
+          put(ValueProto.ValueType.Enum.BOOL, ValueProto.Value.ValCase.BOOL_VAL);
+          put(ValueProto.ValueType.Enum.BYTES_LIST, ValueProto.Value.ValCase.BYTES_LIST_VAL);
+          put(ValueProto.ValueType.Enum.STRING_LIST, ValueProto.Value.ValCase.STRING_LIST_VAL);
+          put(ValueProto.ValueType.Enum.INT32_LIST, ValueProto.Value.ValCase.INT32_LIST_VAL);
+          put(ValueProto.ValueType.Enum.INT64_LIST, ValueProto.Value.ValCase.INT64_LIST_VAL);
+          put(ValueProto.ValueType.Enum.DOUBLE_LIST, ValueProto.Value.ValCase.DOUBLE_LIST_VAL);
+          put(ValueProto.ValueType.Enum.FLOAT_LIST, ValueProto.Value.ValCase.FLOAT_LIST_VAL);
+          put(ValueProto.ValueType.Enum.BOOL_LIST, ValueProto.Value.ValCase.BOOL_LIST_VAL);
+        }
+      };
+
+  Value getFeatureValue(ValueProto.ValueType.Enum valueType);
+
+  FeatureReferenceV2 getFeatureReference();
+
+  Timestamp getEventTimestamp();
+}
diff --git a/java/storage/api/src/main/java/feast/storage/api/retriever/FeatureTableRequest.java b/java/storage/api/src/main/java/feast/storage/api/retriever/FeatureTableRequest.java
new file mode 100644
index 0000000000..2f181e6de8
--- /dev/null
+++ b/java/storage/api/src/main/java/feast/storage/api/retriever/FeatureTableRequest.java
@@ -0,0 +1,62 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.api.retriever;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.collect.ImmutableSet;
+import feast.proto.core.FeatureTableProto.FeatureTableSpec;
+import feast.proto.serving.ServingAPIProto.FeatureReferenceV2;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+@AutoValue
+public abstract class FeatureTableRequest {
+  public abstract FeatureTableSpec getSpec();
+
+  public abstract ImmutableSet<FeatureReferenceV2> getFeatureReferences();
+
+  public static Builder newBuilder() {
+    return new AutoValue_FeatureTableRequest.Builder();
+  }
+
+  @AutoValue.Builder
+  public abstract static class Builder {
+    public abstract Builder setSpec(FeatureTableSpec spec);
+
+    abstract ImmutableSet.Builder<FeatureReferenceV2> featureReferencesBuilder();
+
+    public Builder addAllFeatureReferences(List<FeatureReferenceV2> featureReferenceList) {
+      featureReferencesBuilder().addAll(featureReferenceList);
+      return this;
+    }
+
+    public Builder addFeatureReference(FeatureReferenceV2 featureReference) {
+      featureReferencesBuilder().add(featureReference);
+      return this;
+    }
+
+    public abstract FeatureTableRequest build();
+  }
+
+  public Map<String, FeatureReferenceV2> getFeatureRefsByName() {
+    return getFeatureReferences().stream()
+        .collect(
+            Collectors.toMap(
+                FeatureReferenceV2::getFeatureName, featureReference -> featureReference));
+  }
+}
diff --git a/java/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetrieverV2.java b/java/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetrieverV2.java
new file mode 100644
index 0000000000..fde8ba7396
--- /dev/null
+++ b/java/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetrieverV2.java
@@ -0,0 +1,45 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.api.retriever;
+
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+import java.util.List;
+import java.util.Map;
+
+public interface OnlineRetrieverV2 {
+  /**
+   * Get online features for the given entity rows using data retrieved from the Feature references
+   * specified in FeatureTable request.
+   *
+   * <p>Each {@link Feature} list in the returned result corresponds to one {@link
+   * ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow} provided by the user. If a feature for
+   * a given entity row is not found, its entry is left as null instead. The number of {@link
+   * Feature} lists returned matches the number of given {@link
+   * ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow}s.
+   *
+   * @param entityRows list of entity rows to request features for.
+   * @param featureReferences specifies the FeatureTable to retrieve data from
+   * @param entityNames name of entities
+   * @return list of {@link Feature}s corresponding to data retrieved for each entity row from
+   *     FeatureTable specified in FeatureTable request.
+   */
+  List<List<Feature>> getOnlineFeatures(
+      List<Map<String, ValueProto.Value>> entityRows,
+      List<ServingAPIProto.FeatureReferenceV2> featureReferences,
+      List<String> entityNames);
+}
diff --git a/java/storage/api/src/main/java/feast/storage/api/retriever/ProtoFeature.java b/java/storage/api/src/main/java/feast/storage/api/retriever/ProtoFeature.java
new file mode 100644
index 0000000000..09f6b75f49
--- /dev/null
+++ b/java/storage/api/src/main/java/feast/storage/api/retriever/ProtoFeature.java
@@ -0,0 +1,63 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2021 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.api.retriever;
+
+import com.google.protobuf.Timestamp;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+
+public class ProtoFeature implements Feature {
+  private final ServingAPIProto.FeatureReferenceV2 featureReference;
+
+  private final Timestamp eventTimestamp;
+
+  private final ValueProto.Value featureValue;
+
+  public ProtoFeature(
+      ServingAPIProto.FeatureReferenceV2 featureReference,
+      Timestamp eventTimestamp,
+      ValueProto.Value featureValue) {
+    this.featureReference = featureReference;
+    this.eventTimestamp = eventTimestamp;
+    this.featureValue = featureValue;
+  }
+
+  /**
+   * Returns the stored feature value if its type matches the requested Feast valueType; otherwise
+   * returns null.
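+   * For example, requesting INT64 for a feature whose stored value is a STRING_VAL yields null,
+   * so a type mismatch surfaces as a missing value rather than an exception.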
+ * + * @param valueType Feast valueType of feature as specified in FeatureSpec + * @return ValueProto.Value representation of feature + */ + @Override + public ValueProto.Value getFeatureValue(ValueProto.ValueType.Enum valueType) { + if (TYPE_TO_VAL_CASE.get(valueType) != this.featureValue.getValCase()) { + return null; + } + + return this.featureValue; + } + + @Override + public ServingAPIProto.FeatureReferenceV2 getFeatureReference() { + return this.featureReference; + } + + @Override + public Timestamp getEventTimestamp() { + return this.eventTimestamp; + } +} diff --git a/java/storage/connectors/pom.xml b/java/storage/connectors/pom.xml new file mode 100644 index 0000000000..11e32a154c --- /dev/null +++ b/java/storage/connectors/pom.xml @@ -0,0 +1,61 @@ + + + + dev.feast + feast-parent + ${revision} + ../../pom.xml + + + 4.0.0 + feast-storage-connectors + pom + + Feast Storage Connectors + + + redis + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + + javax.annotation + + + + + + org.jacoco + jacoco-maven-plugin + + + + + + + dev.feast + feast-datatypes + ${project.version} + + + + dev.feast + feast-common + ${project.version} + + + + dev.feast + feast-storage-api + ${project.version} + + + + diff --git a/java/storage/connectors/redis/pom.xml b/java/storage/connectors/redis/pom.xml new file mode 100644 index 0000000000..ce25f41da6 --- /dev/null +++ b/java/storage/connectors/redis/pom.xml @@ -0,0 +1,104 @@ + + + + dev.feast + feast-storage-connectors + ${revision} + + + 4.0.0 + feast-storage-connector-redis + + Feast Storage Connector for Redis + + + + io.lettuce + lettuce-core + 6.0.2.RELEASE + + + + io.netty + netty-transport-native-epoll + 4.1.52.Final + linux-x86_64 + + + + org.apache.commons + commons-lang3 + 3.9 + + + + com.google.auto.value + auto-value-annotations + 1.6.6 + + + + com.google.auto.value + auto-value + 1.6.6 + provided + + + + com.google.guava + guava + ${guava.version} + + + + org.mockito + mockito-core + ${mockito.version} + test + + + + + com.github.kstyrc + embedded-redis + 0.6 + test + + + + org.hamcrest + hamcrest-core + test + ${hamcrest.version} + + + + org.hamcrest + hamcrest-library + test + ${hamcrest.version} + + + + net.ishiis.redis + redis-unit + 1.0.3 + test + + + + + junit + junit + 4.13.2 + test + + + org.slf4j + slf4j-simple + 1.7.32 + test + + + diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisHashDecoder.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisHashDecoder.java new file mode 100644 index 0000000000..78b64fd141 --- /dev/null +++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisHashDecoder.java @@ -0,0 +1,99 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package feast.storage.connectors.redis.common;
+
+import com.google.common.hash.Hashing;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Timestamp;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.types.ValueProto;
+import feast.storage.api.retriever.Feature;
+import feast.storage.api.retriever.ProtoFeature;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class RedisHashDecoder {
+
+  /**
+   * Converts all retrieved Redis Hash values based on EntityRows into {@link Feature}s.
+   *
+   * @param redisHashValues retrieved Redis Hash values based on EntityRows
+   * @param byteToFeatureIdxMap map to decode hash field bytes back to the index of the
+   *     corresponding FeatureReference
+   * @param featureReferences feature references being retrieved, in request order
+   * @param timestampPrefix timestamp prefix
+   * @return list of {@link Feature}s aligned with featureReferences; entries are null where no
+   *     value was found
+   */
+  public static List<Feature> retrieveFeature(
+      Map<byte[], byte[]> redisHashValues,
+      Map<ByteBuffer, Integer> byteToFeatureIdxMap,
+      List<ServingAPIProto.FeatureReferenceV2> featureReferences,
+      String timestampPrefix) {
+    Map<String, Timestamp> featureTableTimestampMap =
+        redisHashValues.entrySet().stream()
+            .filter(e -> new String(e.getKey()).startsWith(timestampPrefix))
+            .collect(
+                Collectors.toMap(
+                    e -> new String(e.getKey()).substring(timestampPrefix.length() + 1),
+                    e -> {
+                      try {
+                        return Timestamp.parseFrom(e.getValue());
+                      } catch (InvalidProtocolBufferException ex) {
+                        throw new RuntimeException(
+                            "Couldn't parse timestamp proto while pulling data from Redis");
+                      }
+                    }));
+    List<Feature> results = new ArrayList<>(Collections.nCopies(featureReferences.size(), null));
+
+    for (Map.Entry<byte[], byte[]> entry : redisHashValues.entrySet()) {
+      Integer featureIdx = byteToFeatureIdxMap.get(ByteBuffer.wrap(entry.getKey()));
+      if (featureIdx == null) {
+        continue;
+      }
+
+      ValueProto.Value v;
+      try {
+        v = ValueProto.Value.parseFrom(entry.getValue());
+      } catch (InvalidProtocolBufferException ex) {
+        throw new RuntimeException(
+            "Couldn't parse feature value proto while pulling data from Redis");
+      }
+      results.set(
+          featureIdx,
+          new ProtoFeature(
+              featureReferences.get(featureIdx),
+              featureTableTimestampMap.get(featureReferences.get(featureIdx).getFeatureViewName()),
+              v));
+    }
+
+    return results;
+  }
+
+  public static byte[] getTimestampRedisHashKeyBytes(String featureTable, String timestampPrefix) {
+    String timestampRedisHashKeyStr = timestampPrefix + ":" + featureTable;
+    return timestampRedisHashKeyStr.getBytes();
+  }
+
+  public static byte[] getFeatureReferenceRedisHashKeyBytes(
+      ServingAPIProto.FeatureReferenceV2 featureReference) {
+    String delimitedFeatureReference =
+        featureReference.getFeatureViewName() + ":" + featureReference.getFeatureName();
+    return Hashing.murmur3_32()
+        .hashString(delimitedFeatureReference, StandardCharsets.UTF_8)
+        .asBytes();
+  }
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisKeyGenerator.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisKeyGenerator.java
new file mode 100644
index 0000000000..389ca0abfd
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/common/RedisKeyGenerator.java
@@ -0,0 +1,61 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.connectors.redis.common;
+
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.storage.RedisProto;
+import feast.proto.types.ValueProto;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class RedisKeyGenerator {
+
+  public static List<RedisProto.RedisKeyV2> buildRedisKeys(
+      String project, List<Map<String, ValueProto.Value>> entityRows) {
+    List<RedisProto.RedisKeyV2> redisKeys =
+        entityRows.stream()
+            .map(entityRow -> makeRedisKey(project, entityRow))
+            .collect(Collectors.toList());
+
+    return redisKeys;
+  }
+
+  /**
+   * Create {@link RedisProto.RedisKeyV2}
+   *
+   * @param project Project where request for features was called from
+   * @param entityRow {@link ServingAPIProto.GetOnlineFeaturesRequestV2.EntityRow}
+   * @return {@link RedisProto.RedisKeyV2}
+   */
+  private static RedisProto.RedisKeyV2 makeRedisKey(
+      String project, Map<String, ValueProto.Value> entityRow) {
+    RedisProto.RedisKeyV2.Builder builder = RedisProto.RedisKeyV2.newBuilder().setProject(project);
+    List<String> entityNames = new ArrayList<>(new HashSet<>(entityRow.keySet()));
+
+    // Sort entity names by alphabetical order
+    entityNames.sort(String::compareTo);
+
+    for (String entityName : entityNames) {
+      builder.addEntityNames(entityName);
+      builder.addEntityValues(entityRow.get(entityName));
+    }
+    return builder.build();
+  }
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializer.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializer.java
new file mode 100644
index 0000000000..6220dd29d4
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializer.java
@@ -0,0 +1,24 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2021 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package feast.storage.connectors.redis.retriever; + +import feast.proto.storage.RedisProto; + +@FunctionalInterface +public interface EntityKeySerializer { + byte[] serialize(final RedisProto.RedisKeyV2 entityKey); +} diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java new file mode 100644 index 0000000000..3e9ab7e8ab --- /dev/null +++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/EntityKeySerializerV2.java @@ -0,0 +1,120 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.retriever; + +import com.google.protobuf.ProtocolStringList; +import feast.proto.storage.RedisProto; +import feast.proto.types.ValueProto; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; +import java.util.*; +import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.tuple.Pair; + +// This is derived from +// https://github.com/feast-dev/feast/blob/b1ccf8dd1535f721aee8bea937ee38feff80bec5/sdk/python/feast/infra/key_encoding_utils.py#L22 +// and must be kept up to date with any changes in that logic. 
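+//
+// Rough sketch of the layout this produces, for illustration (join keys sorted
+// lexicographically; this comment is descriptive, not normative):
+//   [STRING type tag][join key UTF-8 bytes]            ... once per join key, then
+//   [type tag][value length (int32, LE)][value bytes]  ... once per entity value.
+// All integers are written as 4-byte little-endian values via encodeInteger below.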
+public class EntityKeySerializerV2 implements EntityKeySerializer {
+
+  @Override
+  public byte[] serialize(RedisProto.RedisKeyV2 entityKey) {
+    final ProtocolStringList joinKeys = entityKey.getEntityNamesList();
+    final List<ValueProto.Value> values = entityKey.getEntityValuesList();
+
+    assert joinKeys.size() == values.size();
+
+    final List<Byte> buffer = new ArrayList<>();
+
+    final List<Pair<String, ValueProto.Value>> tuples = new ArrayList<>(joinKeys.size());
+    for (int i = 0; i < joinKeys.size(); i++) {
+      tuples.add(Pair.of(joinKeys.get(i), values.get(i)));
+    }
+    tuples.sort(Comparator.comparing(Pair::getLeft));
+
+    for (Pair<String, ValueProto.Value> pair : tuples) {
+      buffer.addAll(encodeInteger(ValueProto.ValueType.Enum.STRING.getNumber()));
+      buffer.addAll(encodeString(pair.getLeft()));
+    }
+
+    for (Pair<String, ValueProto.Value> pair : tuples) {
+      final ValueProto.Value val = pair.getRight();
+      switch (val.getValCase()) {
+        case STRING_VAL:
+          String stringVal = val.getStringVal();
+
+          buffer.addAll(encodeInteger(ValueProto.ValueType.Enum.STRING.getNumber()));
+          buffer.addAll(encodeInteger(stringVal.length()));
+          buffer.addAll(encodeString(stringVal));
+
+          break;
+        case BYTES_VAL:
+          byte[] bytes = val.getBytesVal().toByteArray();
+
+          buffer.addAll(encodeInteger(ValueProto.ValueType.Enum.BYTES.getNumber()));
+          buffer.addAll(encodeInteger(bytes.length));
+          buffer.addAll(encodeBytes(bytes));
+
+          break;
+        case INT32_VAL:
+          buffer.addAll(encodeInteger(ValueProto.ValueType.Enum.INT32.getNumber()));
+          buffer.addAll(encodeInteger(Integer.BYTES));
+          buffer.addAll(encodeInteger(val.getInt32Val()));
+
+          break;
+        case INT64_VAL:
+          buffer.addAll(encodeInteger(ValueProto.ValueType.Enum.INT64.getNumber()));
+          buffer.addAll(encodeInteger(Integer.BYTES));
+          /* This is super dumb - but in https://github.com/feast-dev/feast/blob/dcae1606f53028ce5413567fb8b66f92cfef0f8e/sdk/python/feast/infra/key_encoding_utils.py#L9
+          we use `struct.pack("<l", ...)`, which packs the int64 value into only 4 bytes, so the
+          same truncation is mirrored here to produce identical keys. */
+          buffer.addAll(encodeInteger(((Long) val.getInt64Val()).intValue()));
+
+          break;
+        default:
+          throw new RuntimeException("Unsupported entity value type: " + val.getValCase());
+      }
+    }
+
+    final byte[] bytes = new byte[buffer.size()];
+    for (int i = 0; i < buffer.size(); i++) {
+      bytes[i] = buffer.get(i);
+    }
+    return bytes;
+  }
+
+  private List<Byte> encodeBytes(byte[] toByteArray) {
+    return Arrays.asList(ArrayUtils.toObject(toByteArray));
+  }
+
+  private List<Byte> encodeInteger(Integer value) {
+    ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES);
+    buffer.order(ByteOrder.LITTLE_ENDIAN);
+    buffer.putInt(value);
+
+    return Arrays.asList(ArrayUtils.toObject(buffer.array()));
+  }
+
+  private List<Byte> encodeString(String value) {
+    byte[] stringBytes = value.getBytes(StandardCharsets.UTF_8);
+    return encodeBytes(stringBytes);
+  }
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/OnlineRetriever.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/OnlineRetriever.java
new file mode 100644
index 0000000000..a71812e875
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/OnlineRetriever.java
@@ -0,0 +1,138 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.connectors.redis.retriever;
+
+import com.google.common.collect.Lists;
+import feast.proto.serving.ServingAPIProto;
+import feast.proto.storage.RedisProto;
+import feast.proto.types.ValueProto;
+import feast.storage.api.retriever.Feature;
+import feast.storage.api.retriever.OnlineRetrieverV2;
+import feast.storage.connectors.redis.common.RedisHashDecoder;
+import feast.storage.connectors.redis.common.RedisKeyGenerator;
+import io.lettuce.core.KeyValue;
+import java.nio.ByteBuffer;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.stream.Collectors;
+import org.slf4j.Logger;
+
+public class OnlineRetriever implements OnlineRetrieverV2 {
+
+  private static final Logger log = org.slf4j.LoggerFactory.getLogger(OnlineRetriever.class);
+
+  private static final String timestampPrefix = "_ts";
+  private final RedisClientAdapter redisClientAdapter;
+  private final EntityKeySerializer keySerializer;
+  private final String project;
+
+  // Number of fields in request to Redis which requires using HGETALL instead of HMGET
+  public static final int HGETALL_NUMBER_OF_FIELDS_THRESHOLD = 50;
+
+  public OnlineRetriever(
+      String project, RedisClientAdapter redisClientAdapter, EntityKeySerializer keySerializer) {
+    this.project = project;
+    this.redisClientAdapter = redisClientAdapter;
+    this.keySerializer = keySerializer;
+  }
+
+  @Override
+  public List<List<Feature>> getOnlineFeatures(
+      List<Map<String, ValueProto.Value>> entityRows,
+      List<ServingAPIProto.FeatureReferenceV2> featureReferences,
+      List<String> entityNames) {
+
+    List<RedisProto.RedisKeyV2> redisKeys =
+        RedisKeyGenerator.buildRedisKeys(this.project, entityRows);
+    return getFeaturesFromRedis(redisKeys, featureReferences);
+  }
+
+  private List<List<Feature>> getFeaturesFromRedis(
+      List<RedisProto.RedisKeyV2> redisKeys,
+      List<ServingAPIProto.FeatureReferenceV2> featureReferences) {
+    // To decode bytes back to Feature
+    Map<ByteBuffer, Integer> byteToFeatureIdxMap = new HashMap<>();
+
+    // Serialize using proto
+    List<byte[]> binaryRedisKeys =
+        redisKeys.stream().map(this.keySerializer::serialize).collect(Collectors.toList());
+
+    List<byte[]> retrieveFields = new ArrayList<>();
+    for (int idx = 0;
+        idx < featureReferences.size();
+        idx++) { // eg. murmur(<featuretable_name:feature_name>)
+      byte[] featureReferenceBytes =
+          RedisHashDecoder.getFeatureReferenceRedisHashKeyBytes(featureReferences.get(idx));
+      retrieveFields.add(featureReferenceBytes);
+
+      byteToFeatureIdxMap.put(ByteBuffer.wrap(featureReferenceBytes), idx);
+    }
+
+    featureReferences.stream()
+        .map(ServingAPIProto.FeatureReferenceV2::getFeatureViewName)
+        .distinct()
+        .forEach(
+            table -> {
+              // eg. <_ts:featuretable_name>
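+              // The event timestamp for each feature view is stored once per entity hash under
+              // this dedicated field, so it is fetched in the same round trip as the feature
+              // values and decoded later by RedisHashDecoder.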
<_ts:featuretable_name> + byte[] featureTableTsBytes = + RedisHashDecoder.getTimestampRedisHashKeyBytes(table, timestampPrefix); + + retrieveFields.add(featureTableTsBytes); + }); + + List>> futures = + Lists.newArrayListWithExpectedSize(binaryRedisKeys.size()); + + // Number of fields that controls whether to use hmget or hgetall was discovered empirically + // Could be potentially tuned further + if (retrieveFields.size() < HGETALL_NUMBER_OF_FIELDS_THRESHOLD) { + byte[][] retrieveFieldsByteArray = retrieveFields.toArray(new byte[0][]); + + for (byte[] binaryRedisKey : binaryRedisKeys) { + // Access redis keys and extract features + futures.add( + redisClientAdapter + .hmget(binaryRedisKey, retrieveFieldsByteArray) + .thenApply( + list -> + list.stream() + .filter(KeyValue::hasValue) + .collect(Collectors.toMap(KeyValue::getKey, KeyValue::getValue))) + .toCompletableFuture()); + } + + } else { + for (byte[] binaryRedisKey : binaryRedisKeys) { + futures.add(redisClientAdapter.hgetall(binaryRedisKey)); + } + } + + List> results = Lists.newArrayListWithExpectedSize(futures.size()); + for (Future> f : futures) { + try { + results.add( + RedisHashDecoder.retrieveFeature( + f.get(), byteToFeatureIdxMap, featureReferences, timestampPrefix)); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException("Unexpected error when pulling data from Redis"); + } + } + + return results; + } +} diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClient.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClient.java new file mode 100644 index 0000000000..ea95ca9ace --- /dev/null +++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClient.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.retriever; + +import io.lettuce.core.KeyValue; +import io.lettuce.core.RedisFuture; +import io.lettuce.core.RedisURI; +import io.lettuce.core.api.StatefulRedisConnection; +import io.lettuce.core.api.async.RedisAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import java.util.List; +import java.util.Map; + +public class RedisClient implements RedisClientAdapter { + + private final RedisAsyncCommands asyncCommands; + + @Override + public RedisFuture>> hmget(byte[] key, byte[]... 
+    return asyncCommands.hmget(key, fields);
+  }
+
+  @Override
+  public RedisFuture<Map<byte[], byte[]>> hgetall(byte[] key) {
+    return asyncCommands.hgetall(key);
+  }
+
+  @Override
+  public void flushCommands() {
+    asyncCommands.flushCommands();
+  }
+
+  private RedisClient(StatefulRedisConnection<byte[], byte[]> connection) {
+    this.asyncCommands = connection.async();
+  }
+
+  public static RedisClientAdapter create(RedisStoreConfig config) {
+
+    RedisURI uri = RedisURI.create(config.getHost(), config.getPort());
+
+    if (config.getSsl()) {
+      uri.setSsl(true);
+    }
+
+    if (!config.getPassword().isEmpty()) {
+      uri.setPassword(config.getPassword());
+    }
+
+    StatefulRedisConnection<byte[], byte[]> connection =
+        io.lettuce.core.RedisClient.create(uri).connect(new ByteArrayCodec());
+
+    return new RedisClient(connection);
+  }
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClientAdapter.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClientAdapter.java
new file mode 100644
index 0000000000..a2b870af6c
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClientAdapter.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.connectors.redis.retriever;
+
+import io.lettuce.core.*;
+import java.util.List;
+import java.util.Map;
+
+public interface RedisClientAdapter {
+  RedisFuture<List<KeyValue<byte[], byte[]>>> hmget(byte[] key, byte[]... fields);
+
+  RedisFuture<Map<byte[], byte[]>> hgetall(byte[] key);
+
+  void flushCommands();
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterClient.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterClient.java
new file mode 100644
index 0000000000..d527f245ae
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterClient.java
@@ -0,0 +1,104 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.storage.connectors.redis.retriever;
+
+import io.lettuce.core.*;
+import io.lettuce.core.cluster.ClusterClientOptions;
+import io.lettuce.core.cluster.ClusterTopologyRefreshOptions;
+import io.lettuce.core.cluster.api.StatefulRedisClusterConnection;
+import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands;
+import io.lettuce.core.codec.ByteArrayCodec;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class RedisClusterClient implements RedisClientAdapter {
+
+  private final RedisAdvancedClusterAsyncCommands<byte[], byte[]> asyncCommands;
+
+  @Override
+  public RedisFuture<List<KeyValue<byte[], byte[]>>> hmget(byte[] key, byte[]... fields) {
+    return asyncCommands.hmget(key, fields);
+  }
+
+  @Override
+  public RedisFuture<Map<byte[], byte[]>> hgetall(byte[] key) {
+    return asyncCommands.hgetall(key);
+  }
+
+  @Override
+  public void flushCommands() {
+    asyncCommands.flushCommands();
+  }
+
+  static class Builder {
+    private final StatefulRedisClusterConnection<byte[], byte[]> connection;
+
+    Builder(StatefulRedisClusterConnection<byte[], byte[]> connection) {
+      this.connection = connection;
+    }
+
+    RedisClusterClient build() {
+      return new RedisClusterClient(this);
+    }
+  }
+
+  private RedisClusterClient(Builder builder) {
+    this.asyncCommands = builder.connection.async();
+
+    // allows reading from replicas
+    this.asyncCommands.readOnly();
+  }
+
+  public static RedisClientAdapter create(RedisClusterStoreConfig config) {
+    List<RedisURI> redisURIList =
+        Arrays.stream(config.getConnectionString().split(","))
+            .map(
+                hostPort -> {
+                  String[] hostPortSplit = hostPort.trim().split(":");
+                  RedisURI redisURI =
+                      RedisURI.create(hostPortSplit[0], Integer.parseInt(hostPortSplit[1]));
+                  if (!config.getPassword().isEmpty()) {
+                    redisURI.setPassword(config.getPassword());
+                  }
+                  if (config.getSsl()) {
+                    redisURI.setSsl(true);
+                  }
+                  redisURI.setTimeout(config.getTimeout());
+                  return redisURI;
+                })
+            .collect(Collectors.toList());
+
+    io.lettuce.core.cluster.RedisClusterClient client =
+        io.lettuce.core.cluster.RedisClusterClient.create(redisURIList);
+    client.setOptions(
+        ClusterClientOptions.builder()
+            .socketOptions(SocketOptions.builder().keepAlive(true).tcpNoDelay(true).build())
+            .timeoutOptions(TimeoutOptions.enabled(config.getTimeout()))
+            .pingBeforeActivateConnection(true)
+            .topologyRefreshOptions(
+                ClusterTopologyRefreshOptions.builder().enableAllAdaptiveRefreshTriggers().build())
+            .build());
+
+    StatefulRedisClusterConnection<byte[], byte[]> connection =
+        client.connect(new ByteArrayCodec());
+    connection.setReadFrom(config.getReadFrom());
+
+    return new Builder(connection).build();
+  }
+}
diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterStoreConfig.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterStoreConfig.java
new file mode 100644
index 0000000000..271b07759c
--- /dev/null
+++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterStoreConfig.java
@@ -0,0 +1,57 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2021 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.retriever; + +import io.lettuce.core.ReadFrom; +import java.time.Duration; + +public class RedisClusterStoreConfig { + private final String connectionString; + private final ReadFrom readFrom; + private final Duration timeout; + private final Boolean ssl; + private final String password; + + public RedisClusterStoreConfig( + String connectionString, ReadFrom readFrom, Duration timeout, Boolean ssl, String password) { + this.connectionString = connectionString; + this.readFrom = readFrom; + this.timeout = timeout; + this.ssl = ssl; + this.password = password; + } + + public String getConnectionString() { + return this.connectionString; + } + + public ReadFrom getReadFrom() { + return this.readFrom; + } + + public Duration getTimeout() { + return this.timeout; + } + + public Boolean getSsl() { + return ssl; + } + + public String getPassword() { + return password; + } +} diff --git a/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisStoreConfig.java b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisStoreConfig.java new file mode 100644 index 0000000000..3045235883 --- /dev/null +++ b/java/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisStoreConfig.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.storage.connectors.redis.retriever; + +public class RedisStoreConfig { + private final String host; + private final Integer port; + private final Boolean ssl; + private final String password; + + public RedisStoreConfig(String host, Integer port, Boolean ssl, String password) { + this.host = host; + this.port = port; + this.ssl = ssl; + this.password = password; + } + + public String getHost() { + return this.host; + } + + public Integer getPort() { + return this.port; + } + + public Boolean getSsl() { + return this.ssl; + } + + public String getPassword() { + return this.password; + } +} diff --git a/java/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/test/TestUtil.java b/java/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/test/TestUtil.java new file mode 100644 index 0000000000..66aba44bc2 --- /dev/null +++ b/java/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/test/TestUtil.java @@ -0,0 +1,44 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.test; + +import java.io.IOException; +import redis.embedded.RedisServer; + +public class TestUtil { + public static class LocalRedis { + + private static RedisServer server; + + /** + * Start local Redis for used in testing at "localhost" + * + * @param port port number + * @throws IOException if Redis failed to start + */ + public static void start(int port) throws IOException { + server = new RedisServer(port); + server.start(); + } + + public static void stop() { + if (server != null) { + server.stop(); + } + } + } +} diff --git a/protos/feast/core/CoreService.proto b/protos/feast/core/CoreService.proto deleted file mode 100644 index e9b7b4c43b..0000000000 --- a/protos/feast/core/CoreService.proto +++ /dev/null @@ -1,306 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; -package feast.core; - -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; -option java_outer_classname = "CoreServiceProto"; -option java_package = "feast.proto.core"; - -import "google/protobuf/timestamp.proto"; -import "tensorflow_metadata/proto/v0/statistics.proto"; -import "feast/core/Entity.proto"; -import "feast/core/Feature.proto"; -import "feast/core/FeatureTable.proto"; -import "feast/core/Store.proto"; - -service CoreService { - // Retrieve version information about this Feast deployment - rpc GetFeastCoreVersion (GetFeastCoreVersionRequest) returns (GetFeastCoreVersionResponse); - - // Returns a specific entity - rpc GetEntity (GetEntityRequest) returns (GetEntityResponse); - - // Returns all feature references and respective features matching that filter. If none are found - // an empty map will be returned - // If no filter is provided in the request, the response will contain all the features - // currently stored in the default project. - rpc ListFeatures (ListFeaturesRequest) returns (ListFeaturesResponse); - - // Retrieve store details given a filter. - // - // Returns all stores matching that filter. If none are found, an empty list will be returned. - // If no filter is provided in the request, the response will contain all the stores currently - // stored in the registry. - rpc ListStores (ListStoresRequest) returns (ListStoresResponse); - - // Create or update and existing entity. - // - // This function is idempotent - it will not create a new entity if schema does not change. - // Schema changes will update the entity if the changes are valid. - // Following changes are not valid: - // - Changes to name - // - Changes to type - rpc ApplyEntity (ApplyEntityRequest) returns (ApplyEntityResponse); - - // Returns all entity references and respective entities matching that filter. If none are found - // an empty map will be returned - // If no filter is provided in the request, the response will contain all the entities - // currently stored in the default project. - rpc ListEntities (ListEntitiesRequest) returns (ListEntitiesResponse); - - // Updates core with the configuration of the store. - // - // If the changes are valid, core will return the given store configuration in response, and - // start or update the necessary feature population jobs for the updated store. - rpc UpdateStore (UpdateStoreRequest) returns (UpdateStoreResponse); - - // Creates a project. Projects serve as namespaces within which resources like features will be - // created. Feature table names as must be unique within a project while field (Feature/Entity) names - // must be unique within a Feature Table. Project names themselves must be globally unique. - rpc CreateProject (CreateProjectRequest) returns (CreateProjectResponse); - - // Archives a project. Archived projects will continue to exist and function, but won't be visible - // through the Core API. Any existing ingestion or serving requests will continue to function, - // but will result in warning messages being logged. It is not possible to unarchive a project - // through the Core API - rpc ArchiveProject (ArchiveProjectRequest) returns (ArchiveProjectResponse); - - // Lists all projects active projects. - rpc ListProjects (ListProjectsRequest) returns (ListProjectsResponse); - - /* Feature Tables */ - // Create or update an existing feature table. - // This function is idempotent - it will not create a new feature table if the schema does not change. 
- // Schema changes will update the feature table if the changes are valid. - // All changes except the following are valid: - // - Changes to feature table name. - // - Changes to entities - // - Changes to feature name and type - rpc ApplyFeatureTable (ApplyFeatureTableRequest) returns (ApplyFeatureTableResponse); - - // List feature tables that match a given filter. - // Returns the references of the Feature Tables matching that filter. If none are found, - // an empty list will be returned. - // If no filter is provided in the request, the response will match all the feature - // tables currently stored in the registry. - rpc ListFeatureTables (ListFeatureTablesRequest) returns (ListFeatureTablesResponse); - - // Returns a specific feature table - rpc GetFeatureTable (GetFeatureTableRequest) returns (GetFeatureTableResponse); - - // Delete a specific feature table - rpc DeleteFeatureTable (DeleteFeatureTableRequest) returns (DeleteFeatureTableResponse); - -} - -// Request for a single entity -message GetEntityRequest { - // Name of entity (required). - string name = 1; - - // Name of project the entity belongs to. If omitted will default to 'default' project. - string project = 2; -} - -// Response containing a single entity -message GetEntityResponse { - feast.core.Entity entity = 1; -} - -// Retrieves details for all versions of a specific entity -message ListEntitiesRequest { - Filter filter = 1; - - message Filter { - // Optional. Specifies the name of the project to list Entities in. - // It is NOT possible to provide an asterisk with a string in order to do pattern matching. - // If unspecified, this field will default to the default project 'default'. - string project = 3; - - // Optional. User defined metadata for entity. - // Entities with all matching labels will be returned. - map labels = 4; - } -} - -message ListEntitiesResponse { - repeated feast.core.Entity entities = 1; -} - -message ListFeaturesRequest { - message Filter { - // User defined metadata for feature. - // Features with all matching labels will be returned. - map labels = 1; - - // List of entities contained within the featureSet that the feature belongs to. - // Only feature tables with these entities will be searched for features. - repeated string entities = 2; - - // Name of project that the feature tables belongs to. Filtering on projects is disabled. - // It is NOT possible to provide an asterisk with a string in order to do pattern matching. - // If unspecified this field will default to the default project 'default'. - string project = 3; - } - - Filter filter = 1; -} - -message ListFeaturesResponse { - reserved 1; - - map features = 2; -} - -message ListStoresRequest { - message Filter { - // Name of desired store. Regex is not supported in this query. - string name = 1; - } - - Filter filter = 1; -} - -message ListStoresResponse { - repeated feast.core.Store store = 1; -} - -message ApplyEntityRequest { - // If project is unspecified, will default to 'default' project. - // If project specified does not exist, the project would be automatically created. - feast.core.EntitySpecV2 spec = 1; - - // Name of project that this entity belongs to. 
- string project = 2; -} - -message ApplyEntityResponse { - feast.core.Entity entity = 1; -} - -message GetFeastCoreVersionRequest { -} - -message GetFeastCoreVersionResponse { - string version = 1; -} - -message UpdateStoreRequest { - feast.core.Store store = 1; -} - -message UpdateStoreResponse { - enum Status { - // Existing store config matching the given store id is identical to the given store config. - NO_CHANGE = 0; - - // New store created or existing config updated. - UPDATED = 1; - } - feast.core.Store store = 1; - Status status = 2; -} - -// Request to create a project -message CreateProjectRequest { - // Name of project (required) - string name = 1; -} - -// Response for creation of a project -message CreateProjectResponse { -} - -// Request for the archival of a project -message ArchiveProjectRequest { - // Name of project to be archived - string name = 1; -} - -// Response for archival of a project -message ArchiveProjectResponse { -} - -// Request for listing of projects -message ListProjectsRequest { -} - -// Response for listing of projects -message ListProjectsResponse { - // List of project names (archived projects are filtered out) - repeated string projects = 1; -} - -message UpdateFeatureSetStatusResponse {} - -message ApplyFeatureTableRequest { - // Optional. Name of the Project to apply the Feature Table to. - // If unspecified, will apply FeatureTable to the default project. - string project = 1; - // Feature Table specification to apply - FeatureTableSpec table_spec = 2; -} - -message ApplyFeatureTableResponse { - FeatureTable table = 1; -} - -message GetFeatureTableRequest { - // Optional. Name of the Project to retrieve the Feature Table from. - // If unspecified, will apply FeatureTable to the default project. - string project = 1; - - // Name of the FeatureTable to retrieve. - string name = 2; -} - -message GetFeatureTableResponse { - // The Feature Table retrieved. - FeatureTable table = 1; -} - -message ListFeatureTablesRequest { - message Filter { - // Optional. Specifies the name of the project to list Feature Tables in. - // If unspecified would list Feature Tables in the default project. - string project = 1; - - // Optional. Feature Tables with all matching labels will be returned. - // If unspecified would list Feature Tables without filtering by labels. - map labels = 3; - } - - // Filter used when listing Feature Tables - Filter filter = 1; -} - -message ListFeatureTablesResponse { - // List of matching Feature Tables - repeated FeatureTable tables = 1; -} - -message DeleteFeatureTableRequest { - // Optional. Name of the Project to delete the Feature Table from. - // If unspecified, will delete FeatureTable from the default project. - string project = 1; - - // Name of the FeatureTable to delete. 
-  string name = 2;
-}
-
-message DeleteFeatureTableResponse {}
diff --git a/protos/feast/core/DataFormat.proto b/protos/feast/core/DataFormat.proto
index 2926e08c63..9fd01e865c 100644
--- a/protos/feast/core/DataFormat.proto
+++ b/protos/feast/core/DataFormat.proto
@@ -18,7 +18,7 @@ syntax = "proto3";
 package feast.core;
 
-option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core";
+option go_package = "github.com/feast-dev/feast/go/protos/feast/core";
 option java_outer_classname = "DataFormatProto";
 option java_package = "feast.proto.core";
 
diff --git a/protos/feast/core/DataSource.proto b/protos/feast/core/DataSource.proto
index 099ba32d92..9e6028ccfa 100644
--- a/protos/feast/core/DataSource.proto
+++ b/protos/feast/core/DataSource.proto
@@ -18,36 +18,58 @@ syntax = "proto3";
 package feast.core;
 
-option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core";
+option go_package = "github.com/feast-dev/feast/go/protos/feast/core";
 option java_outer_classname = "DataSourceProto";
 option java_package = "feast.proto.core";
 
 import "feast/core/DataFormat.proto";
+import "feast/types/Value.proto";
+import "feast/core/Feature.proto";
 
 // Defines a Data Source that can be used source Feature data
+// Next available id: 28
 message DataSource {
   // Field indexes should *not* be reused. Not sure if fields 6-10 were used previously or not,
   // but they are going to be reserved for backwards compatibility.
   reserved 6 to 10;
 
   // Type of Data Source.
+  // Next available id: 12
   enum SourceType {
     INVALID = 0;
     BATCH_FILE = 1;
+    BATCH_SNOWFLAKE = 8;
     BATCH_BIGQUERY = 2;
+    BATCH_REDSHIFT = 5;
     STREAM_KAFKA = 3;
     STREAM_KINESIS = 4;
-    BATCH_REDSHIFT = 5;
     CUSTOM_SOURCE = 6;
+    REQUEST_SOURCE = 7;
+    PUSH_SOURCE = 9;
+    BATCH_TRINO = 10;
+    BATCH_SPARK = 11;
   }
+
+  // Unique name of data source within the project
+  string name = 20;
+
+  // Name of Feast project that this data source belongs to.
+  string project = 21;
+
+  string description = 23;
+
+  map<string, string> tags = 24;
+
+  string owner = 25;
+
   SourceType type = 1;
 
-  // Defines mapping between fields in the sourced data
+  // Defines mapping between fields in the sourced data
   // and fields in parent FeatureTable.
   map<string, string> field_mapping = 2;
 
   // Must specify event timestamp column name
-  string event_timestamp_column = 3;
+  string timestamp_field = 3;
 
   // (Optional) Specify partition column
   // useful for file sources
@@ -58,8 +80,14 @@ message DataSource {
 
   // This is an internal field that is represents the python class for the data source object a proto object represents.
   // This should be set by feast, and not by users.
+  // The field is used primarily by custom data sources and is mandatory for them to set. Feast may set it for
+  // first party sources as well.
   string data_source_class_type = 17;
 
+  // Optional batch source for streaming sources, used for historical feature retrieval and materialization.
+  DataSource batch_source = 26;
+
+
   // Defines options for DataSource that sources features from a file
   message FileOptions {
     FileFormat file_format = 1;
 
@@ -68,13 +96,26 @@ message DataSource {
     // s3://path/to/file for AWS S3 storage
     // gs://path/to/file for GCP GCS storage
     // file:///path/to/file for local storage
-    string file_url = 2;
+    string uri = 2;
+
+    // override AWS S3 storage endpoint with custom S3 endpoint
+    string s3_endpoint_override = 3;
   }
 
   // Defines options for DataSource that sources features from a BigQuery Query
   message BigQueryOptions {
     // Full table reference in the form of [project:dataset.table]
-    string table_ref = 1;
+    string table = 1;
+
+    // SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
+    // entity columns
+    string query = 2;
+  }
+
+  // Defines options for DataSource that sources features from a Trino Query
+  message TrinoOptions {
+    // Full table reference in the form of [catalog.schema.table]
+    string table = 1;
 
     // SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
     // entity columns
@@ -93,6 +134,7 @@ message DataSource {
 
     // Defines the stream data format encoding feature/entity data in Kafka messages.
     StreamFormat message_format = 3;
+
   }
 
   // Defines options for DataSource that sources features from Kinesis records.
@@ -118,6 +160,46 @@ message DataSource {
     // SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
     // entity columns
     string query = 2;
+
+    // Redshift schema name
+    string schema = 3;
+
+    // Redshift database name
+    string database = 4;
+  }
+
+  // Defines options for DataSource that sources features from a Snowflake Query
+  message SnowflakeOptions {
+    // Snowflake table name
+    string table = 1;
+
+    // SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
+    // entity columns
+    string query = 2;
+
+    // Snowflake schema name
+    string schema = 3;
+
+    // Snowflake database name
+    string database = 4;
+
+    // Snowflake warehouse name
+    string warehouse = 5;
+  }
+
+  // Defines options for DataSource that sources features from a spark table/query
+  message SparkOptions {
+    // Table name
+    string table = 1;
+
+    // Spark SQL query that returns the table, this is an alternative to `table`
+    string query = 2;
+
+    // Path from which spark can read the table, this is an alternative to `table`
+    string path = 3;
+
+    // Format of files at `path` (e.g. parquet, avro, etc)
+    string file_format = 4;
   }
 
   // Defines configuration for custom third-party data sources.
@@ -127,6 +209,23 @@ message DataSource {
     bytes configuration = 1;
   }
 
+  // Defines options for DataSource that sources features from request data
+  message RequestDataOptions {
+    reserved 1;
+    // Mapping of feature name to type
+    map<string, feast.types.ValueType.Enum> deprecated_schema = 2;
+
+    repeated FeatureSpecV2 schema = 3;
+
+  }
+
+  // Defines options for DataSource that supports pushing data to it. This allows data to be pushed to
+  // the online store on-demand, such as by stream consumers.
+  message PushOptions {
+    reserved 1;
+  }
+
+
+  // DataSource options.
oneof options { FileOptions file_options = 11; @@ -134,6 +233,11 @@ message DataSource { KafkaOptions kafka_options = 13; KinesisOptions kinesis_options = 14; RedshiftOptions redshift_options = 15; + RequestDataOptions request_data_options = 18; CustomSourceOptions custom_options = 16; + SnowflakeOptions snowflake_options = 19; + PushOptions push_options = 22; + SparkOptions spark_options = 27; + TrinoOptions trino_options = 30; } } diff --git a/protos/feast/core/DatastoreTable.proto b/protos/feast/core/DatastoreTable.proto new file mode 100644 index 0000000000..4246a6ae6e --- /dev/null +++ b/protos/feast/core/DatastoreTable.proto @@ -0,0 +1,39 @@ +// +// * Copyright 2021 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "DatastoreTableProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +import "google/protobuf/wrappers.proto"; + +// Represents a Datastore table +message DatastoreTable { + // Feast project of the table + string project = 1; + + // Name of the table + string name = 2; + + // GCP project id + google.protobuf.StringValue project_id = 3; + + // Datastore namespace + google.protobuf.StringValue namespace = 4; +} \ No newline at end of file diff --git a/protos/feast/core/DynamoDBTable.proto b/protos/feast/core/DynamoDBTable.proto new file mode 100644 index 0000000000..4e5c8714e8 --- /dev/null +++ b/protos/feast/core/DynamoDBTable.proto @@ -0,0 +1,31 @@ +// +// * Copyright 2021 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
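As a usage sketch for the revised DataSource message above: the renamed fields (timestamp_field, uri) and the new identity fields can be populated through the generated Python bindings. The feast.protos import path and the parquet_format variant of FileFormat are assumptions based on the Python SDK's generated code, not shown in this diff.

```python
# Minimal sketch: a file-backed DataSource using the renamed fields.
# Import path is an assumption based on the Python SDK's generated bindings.
from feast.protos.feast.core.DataSource_pb2 import DataSource

source = DataSource(
    name="driver_hourly_stats",            # new identity field (20)
    project="default",                     # new identity field (21)
    type=DataSource.SourceType.BATCH_FILE,
    timestamp_field="event_timestamp",     # renamed from event_timestamp_column
)
source.file_options.uri = "s3://bucket/driver_stats.parquet"         # renamed from file_url
source.file_options.s3_endpoint_override = "http://localhost:9000"   # new: custom S3 endpoint
# The parquet variant of FileFormat is assumed from DataFormat.proto (not shown in this hunk).
source.file_options.file_format.parquet_format.SetInParent()
```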
+// + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "DynamoDBTableProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +// Represents a DynamoDB table +message DynamoDBTable { + // Name of the table + string name = 1; + + // Region of the table + string region = 2; +} \ No newline at end of file diff --git a/protos/feast/core/Entity.proto b/protos/feast/core/Entity.proto index 7846015c73..d8d8bedc5e 100644 --- a/protos/feast/core/Entity.proto +++ b/protos/feast/core/Entity.proto @@ -19,7 +19,7 @@ syntax = "proto3"; package feast.core; option java_package = "feast.proto.core"; option java_outer_classname = "EntityProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; import "feast/types/Value.proto"; import "google/protobuf/timestamp.proto"; @@ -48,7 +48,10 @@ message EntitySpecV2 { string join_key = 4; // User defined metadata - map labels = 8; + map tags = 8; + + // Owner of the entity. + string owner = 10; } message EntityMeta { diff --git a/protos/feast/core/Feature.proto b/protos/feast/core/Feature.proto index ea0d340a00..f6826bef81 100644 --- a/protos/feast/core/Feature.proto +++ b/protos/feast/core/Feature.proto @@ -18,7 +18,7 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureProto"; option java_package = "feast.proto.core"; @@ -31,6 +31,6 @@ message FeatureSpecV2 { // Value type of the feature. Not updatable. feast.types.ValueType.Enum value_type = 2; - // Labels for user defined metadata on a feature - map labels = 3; + // Tags for user defined metadata on a feature + map tags = 3; } diff --git a/protos/feast/core/FeatureService.proto b/protos/feast/core/FeatureService.proto index f4ab1cdbc8..2295677583 100644 --- a/protos/feast/core/FeatureService.proto +++ b/protos/feast/core/FeatureService.proto @@ -1,7 +1,7 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureServiceProto"; option java_package = "feast.proto.core"; @@ -23,12 +23,18 @@ message FeatureServiceSpec { // Name of Feast project that this Feature Service belongs to. string project = 2; - // List of features that this feature service encapsulates. - // Stored as a list of references to other features views and the features from those views. + // Represents a projection that's to be applied on top of the FeatureView. + // Contains data such as the features to use from a FeatureView. repeated FeatureViewProjection features = 3; // User defined metadata map tags = 4; + + // Description of the feature service. + string description = 5; + + // Owner of the feature service. 
+ string owner = 6; } diff --git a/protos/feast/core/FeatureTable.proto b/protos/feast/core/FeatureTable.proto index 661f4eecfc..4054db58ae 100644 --- a/protos/feast/core/FeatureTable.proto +++ b/protos/feast/core/FeatureTable.proto @@ -19,7 +19,7 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureTableProto"; option java_package = "feast.proto.core"; diff --git a/protos/feast/core/FeatureView.proto b/protos/feast/core/FeatureView.proto index f39fcf5e73..2662350540 100644 --- a/protos/feast/core/FeatureView.proto +++ b/protos/feast/core/FeatureView.proto @@ -18,7 +18,7 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureViewProto"; option java_package = "feast.proto.core"; @@ -35,6 +35,8 @@ message FeatureView { FeatureViewMeta meta = 2; } +// Next available id: 12 +// TODO(adchia): refactor common fields from this and ODFV into separate metadata proto message FeatureViewSpec { // Name of the feature view. Must be unique. Not updated. string name = 1; @@ -46,12 +48,18 @@ message FeatureViewSpec { // Feature View. Not updatable. repeated string entities = 3; - // List of features specifications for each feature defined with this feature view. + // List of specifications for each field defined as part of this feature view. repeated FeatureSpecV2 features = 4; + // Description of the feature view. + string description = 10; + // User defined metadata map tags = 5; + // Owner of the feature view. + string owner = 11; + // Features in this feature view can only be retrieved from online serving // younger than ttl. Ttl is measured as the duration of time between // the feature's event timestamp and when the feature is retrieved diff --git a/protos/feast/core/FeatureViewProjection.proto b/protos/feast/core/FeatureViewProjection.proto index a7b9ae9a89..36d17632e7 100644 --- a/protos/feast/core/FeatureViewProjection.proto +++ b/protos/feast/core/FeatureViewProjection.proto @@ -1,18 +1,25 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureReferenceProto"; option java_package = "feast.proto.core"; import "feast/core/Feature.proto"; -// A reference to features in a feature view +// A projection to be applied on top of a FeatureView. +// Contains the modifications to a FeatureView such as the features subset to use. message FeatureViewProjection { // The feature view name string feature_view_name = 1; + // Alias for feature view name + string feature_view_name_alias = 3; + // The features of the feature view that are a part of the feature reference. 
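The projection message introduced here (its remaining fields continue just below) can be exercised as follows: an aliased view of driver_stats whose join key is remapped onto the entity data's key. A sketch only; binding paths are assumed and names are illustrative.

```python
from feast.protos.feast.core.Feature_pb2 import FeatureSpecV2
from feast.protos.feast.core.FeatureViewProjection_pb2 import FeatureViewProjection
from feast.protos.feast.types.Value_pb2 import ValueType

projection = FeatureViewProjection(
    feature_view_name="driver_stats",
    feature_view_name_alias="origin_driver_stats",  # new field 3
    feature_columns=[FeatureSpecV2(name="acc_rate", value_type=ValueType.FLOAT)],
)
# join_key_map (new field 4): feature-data join key -> entity-data join key.
projection.join_key_map["driver_id"] = "origin_driver_id"
```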
repeated FeatureSpecV2 feature_columns = 2; + + // Map for entity join_key overrides of feature data entity join_key to entity data join_key + map join_key_map = 4; } diff --git a/protos/feast/core/InfraObject.proto b/protos/feast/core/InfraObject.proto new file mode 100644 index 0000000000..3520401517 --- /dev/null +++ b/protos/feast/core/InfraObject.proto @@ -0,0 +1,51 @@ +// +// * Copyright 2021 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "InfraObjectProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +import "feast/core/DatastoreTable.proto"; +import "feast/core/DynamoDBTable.proto"; +import "feast/core/SqliteTable.proto"; + +// Represents a set of infrastructure objects managed by Feast +message Infra { + // List of infrastructure objects managed by Feast + repeated InfraObject infra_objects = 1; +} + +// Represents a single infrastructure object managed by Feast +message InfraObject { + // Represents the Python class for the infrastructure object + string infra_object_class_type = 1; + + // The infrastructure object + oneof infra_object { + DynamoDBTable dynamodb_table = 2; + DatastoreTable datastore_table = 3; + SqliteTable sqlite_table = 4; + CustomInfra custom_infra = 100; + } + + // Allows for custom infra objects to be added + message CustomInfra { + bytes field = 1; + } +} \ No newline at end of file diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto new file mode 100644 index 0000000000..33c51f5c4d --- /dev/null +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -0,0 +1,86 @@ +// +// Copyright 2020 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + + +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "OnDemandFeatureViewProto"; +option java_package = "feast.proto.core"; + +import "google/protobuf/timestamp.proto"; +import "feast/core/FeatureView.proto"; +import "feast/core/FeatureViewProjection.proto"; +import "feast/core/Feature.proto"; +import "feast/core/DataSource.proto"; + +message OnDemandFeatureView { + // User-specified specifications of this feature view. 
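A sketch tying together the new infra protos above: an Infra set holding a DatastoreTable, whose wrapper-typed fields (google.protobuf.StringValue) make "unset" distinguishable from an empty string. The class-type string and binding paths are assumptions for illustration.

```python
from google.protobuf.wrappers_pb2 import StringValue

from feast.protos.feast.core.DatastoreTable_pb2 import DatastoreTable
from feast.protos.feast.core.InfraObject_pb2 import Infra, InfraObject

datastore = DatastoreTable(
    project="default",
    name="driver_hourly_stats",
    project_id=StringValue(value="my-gcp-project"),
)
assert datastore.HasField("project_id")     # wrapper fields have real presence
assert not datastore.HasField("namespace")  # unset, not merely ""

infra = Infra(
    infra_objects=[
        InfraObject(
            # Illustrative class path; Feast sets this to the Python class it maps to.
            infra_object_class_type="feast.infra.online_stores.datastore.DatastoreTable",
            datastore_table=datastore,
        )
    ]
)
```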
+  OnDemandFeatureViewSpec spec = 1;
+  OnDemandFeatureViewMeta meta = 2;
+}
+
+// Next available id: 9
+message OnDemandFeatureViewSpec {
+  // Name of the feature view. Must be unique. Not updated.
+  string name = 1;
+
+  // Name of Feast project that this feature view belongs to.
+  string project = 2;
+
+  // List of features specifications for each feature defined with this feature view.
+  repeated FeatureSpecV2 features = 3;
+
+  // Map of sources for this feature view.
+  map<string, OnDemandSource> sources = 4;
+
+  UserDefinedFunction user_defined_function = 5;
+
+  // Description of the on demand feature view.
+  string description = 6;
+
+  // User defined metadata.
+  map<string, string> tags = 7;
+
+  // Owner of the on demand feature view.
+  string owner = 8;
+}
+
+message OnDemandFeatureViewMeta {
+  // Time when this Feature View is created
+  google.protobuf.Timestamp created_timestamp = 1;
+
+  // Time when this Feature View is last updated
+  google.protobuf.Timestamp last_updated_timestamp = 2;
+}
+
+message OnDemandSource {
+  oneof source {
+    FeatureView feature_view = 1;
+    FeatureViewProjection feature_view_projection = 3;
+    DataSource request_data_source = 2;
+  }
+}
+
+// Serialized representation of python function.
+message UserDefinedFunction {
+  // The function name
+  string name = 1;
+
+  // The python-syntax function body (serialized by dill)
+  bytes body = 2;
+}
diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto
index bb32d6fc3c..1978f41064 100644
--- a/protos/feast/core/Registry.proto
+++ b/protos/feast/core/Registry.proto
@@ -19,19 +19,33 @@ syntax = "proto3";
 package feast.core;
 option java_package = "feast.proto.core";
 option java_outer_classname = "RegistryProto";
-option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core";
+option go_package = "github.com/feast-dev/feast/go/protos/feast/core";
 
 import "feast/core/Entity.proto";
+import "feast/core/FeatureService.proto";
 import "feast/core/FeatureTable.proto";
 import "feast/core/FeatureView.proto";
+import "feast/core/InfraObject.proto";
+import "feast/core/OnDemandFeatureView.proto";
+import "feast/core/RequestFeatureView.proto";
+import "feast/core/DataSource.proto";
+import "feast/core/SavedDataset.proto";
 import "google/protobuf/timestamp.proto";
 
+// Next id: 13
 message Registry {
   repeated Entity entities = 1;
   repeated FeatureTable feature_tables = 2;
   repeated FeatureView feature_views = 6;
+  repeated DataSource data_sources = 12;
+  repeated OnDemandFeatureView on_demand_feature_views = 8;
+  repeated RequestFeatureView request_feature_views = 9;
+  repeated FeatureService feature_services = 7;
+  repeated SavedDataset saved_datasets = 11;
+  Infra infra = 10;
 
   string registry_schema_version = 3; // to support migrations; incremented when schema is changed
   string version_id = 4; // version id, random string generated on each update of the data; now used only for debugging purposes
   google.protobuf.Timestamp last_updated = 5;
+
 }
diff --git a/protos/feast/core/RequestFeatureView.proto b/protos/feast/core/RequestFeatureView.proto
new file mode 100644
index 0000000000..4049053c2b
--- /dev/null
+++ b/protos/feast/core/RequestFeatureView.proto
@@ -0,0 +1,51 @@
+//
+// Copyright 2021 The Feast Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
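Per the comment on body above, user_defined_function round-trips a Python callable through dill. A minimal sketch (binding path assumed; the function itself is illustrative):

```python
import dill

from feast.protos.feast.core.OnDemandFeatureView_pb2 import UserDefinedFunction

def add_fare_ratio(inputs):
    # Illustrative transform over a pandas DataFrame of input features.
    inputs["fare_ratio"] = inputs["fare"] / inputs["trip_distance"]
    return inputs

udf = UserDefinedFunction(name="add_fare_ratio", body=dill.dumps(add_fare_ratio))
restored = dill.loads(udf.body)  # deserializes back to a callable
```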
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + + +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "RequestFeatureViewProto"; +option java_package = "feast.proto.core"; + +import "feast/core/DataSource.proto"; + +message RequestFeatureView { + // User-specified specifications of this feature view. + RequestFeatureViewSpec spec = 1; +} + +// Next available id: 7 +message RequestFeatureViewSpec { + // Name of the feature view. Must be unique. Not updated. + string name = 1; + + // Name of Feast project that this feature view belongs to. + string project = 2; + + // Request data which contains the underlying data schema and list of associated features + DataSource request_data_source = 3; + + // Description of the request feature view. + string description = 4; + + // User defined metadata. + map tags = 5; + + // Owner of the request feature view. + string owner = 6; +} diff --git a/protos/feast/core/SavedDataset.proto b/protos/feast/core/SavedDataset.proto new file mode 100644 index 0000000000..353e925ad1 --- /dev/null +++ b/protos/feast/core/SavedDataset.proto @@ -0,0 +1,81 @@ +// +// Copyright 2021 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "SavedDatasetProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +import "google/protobuf/timestamp.proto"; +import "feast/core/DataSource.proto"; + +message SavedDatasetSpec { + // Name of the dataset. Must be unique since it's possible to overwrite dataset by name + string name = 1; + + // Name of Feast project that this Dataset belongs to. 
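A request feature view wraps a REQUEST_SOURCE data source, pairing the spec fields above with the new SourceType value. A hypothetical sketch; binding paths and all names are illustrative assumptions:

```python
from feast.protos.feast.core.DataSource_pb2 import DataSource
from feast.protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec

spec = RequestFeatureViewSpec(
    name="conv_rate_input",
    project="default",
    request_data_source=DataSource(
        name="conv_rate_input_source",
        type=DataSource.SourceType.REQUEST_SOURCE,  # new enum value (7)
    ),
    description="Request-time inputs for conversion rate",
    owner="ml-platform@example.com",
)
```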
+ string project = 2; + + // list of feature references with format ":" + repeated string features = 3; + + // entity columns + request columns from all feature views used during retrieval + repeated string join_keys = 4; + + // Whether full feature names are used in stored data + bool full_feature_names = 5; + + SavedDatasetStorage storage = 6; + + // Optional and only populated if generated from a feature service fetch + string feature_service_name = 8; + + // User defined metadata + map tags = 7; +} + +message SavedDatasetStorage { + oneof kind { + DataSource.FileOptions file_storage = 4; + DataSource.BigQueryOptions bigquery_storage = 5; + DataSource.RedshiftOptions redshift_storage = 6; + DataSource.SnowflakeOptions snowflake_storage = 7; + DataSource.TrinoOptions trino_storage = 8; + DataSource.SparkOptions spark_storage = 9; + } +} + +message SavedDatasetMeta { + // Time when this saved dataset is created + google.protobuf.Timestamp created_timestamp = 1; + + // Time when this saved dataset is last updated + google.protobuf.Timestamp last_updated_timestamp = 2; + + // Min timestamp in the dataset (needed for retrieval) + google.protobuf.Timestamp min_event_timestamp = 3; + + // Max timestamp in the dataset (needed for retrieval) + google.protobuf.Timestamp max_event_timestamp = 4; +} + +message SavedDataset { + SavedDatasetSpec spec = 1; + SavedDatasetMeta meta = 2; +} diff --git a/protos/feast/core/SqliteTable.proto b/protos/feast/core/SqliteTable.proto new file mode 100644 index 0000000000..8665be840a --- /dev/null +++ b/protos/feast/core/SqliteTable.proto @@ -0,0 +1,31 @@ +// +// * Copyright 2021 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "SqliteTableProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +// Represents a Sqlite table +message SqliteTable { + // Absolute path of the table + string path = 1; + + // Name of the table + string name = 2; +} \ No newline at end of file diff --git a/protos/feast/core/Store.proto b/protos/feast/core/Store.proto index 41a76a11c2..c92a526354 100644 --- a/protos/feast/core/Store.proto +++ b/protos/feast/core/Store.proto @@ -19,7 +19,7 @@ package feast.core; option java_package = "feast.proto.core"; option java_outer_classname = "StoreProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; // Store provides a location where Feast reads and writes feature values. // Feature values will be written to the Store in the form of FeatureRow elements. 
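SavedDatasetStorage reuses the per-source options messages from DataSource, so a parquet-backed saved dataset is just a FileOptions in the kind oneof. A sketch with assumed binding paths; the feature reference strings follow the view:feature convention used elsewhere in Feast:

```python
from feast.protos.feast.core.DataSource_pb2 import DataSource
from feast.protos.feast.core.SavedDataset_pb2 import SavedDatasetSpec, SavedDatasetStorage

spec = SavedDatasetSpec(
    name="driver_training_2021",
    project="default",
    features=["driver_stats:acc_rate", "driver_stats:avg_daily_trips"],
    join_keys=["driver_id"],
    full_feature_names=False,
    storage=SavedDatasetStorage(
        file_storage=DataSource.FileOptions(uri="s3://bucket/saved/driver_training.parquet"),
    ),
)
```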
diff --git a/protos/feast/core/ValidationProfile.proto b/protos/feast/core/ValidationProfile.proto new file mode 100644 index 0000000000..673a792fdf --- /dev/null +++ b/protos/feast/core/ValidationProfile.proto @@ -0,0 +1,47 @@ +// +// Copyright 2021 The Feast Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "ValidationProfile"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +import "feast/core/SavedDataset.proto"; + +message GEValidationProfiler { + message UserDefinedProfiler { + // The python-syntax function body (serialized by dill) + bytes body = 1; + } + + UserDefinedProfiler profiler = 1; +} + +message GEValidationProfile { + // JSON-serialized ExpectationSuite object + bytes expectation_suite = 1; +} + +message ValidationReference { + SavedDataset dataset = 1; + + oneof profiler { + GEValidationProfiler ge_profiler = 2; + } +} diff --git a/protos/feast/serving/Connector.proto b/protos/feast/serving/Connector.proto new file mode 100644 index 0000000000..4e4ec51774 --- /dev/null +++ b/protos/feast/serving/Connector.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package grpc.connector; + +import "google/protobuf/timestamp.proto"; +import "feast/types/Value.proto"; +import "feast/types/EntityKey.proto"; +import "feast/serving/ServingService.proto"; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/serving"; + +message ConnectorFeature { + feast.serving.FeatureReferenceV2 reference = 1; + google.protobuf.Timestamp timestamp = 2; + feast.types.Value value = 3; +} + +message ConnectorFeatureList { + repeated ConnectorFeature featureList = 1; +} + +service OnlineStore { + rpc OnlineRead(OnlineReadRequest) returns (OnlineReadResponse); +} + +message OnlineReadRequest { + repeated feast.types.EntityKey entityKeys = 1; + string view = 2; + repeated string features = 3; +} + +message OnlineReadResponse { + repeated ConnectorFeatureList results = 1; +} \ No newline at end of file diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto index 5ed7c0c55d..a940b72502 100644 --- a/protos/feast/serving/ServingService.proto +++ b/protos/feast/serving/ServingService.proto @@ -20,18 +20,16 @@ package feast.serving; import "google/protobuf/timestamp.proto"; import "feast/types/Value.proto"; -import "tensorflow_metadata/proto/v0/statistics.proto"; option java_package = "feast.proto.serving"; option java_outer_classname = "ServingAPIProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/serving"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/serving"; service ServingService { // Get information about this Feast serving. rpc GetFeastServingInfo (GetFeastServingInfoRequest) returns (GetFeastServingInfoResponse); - - // Get online features (v2) synchronously. 
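Per the comments in ValidationProfile.proto above, GEValidationProfile stores a JSON-serialized Great Expectations ExpectationSuite as bytes. A sketch with a plain dict standing in for ExpectationSuite.to_json_dict() (binding path assumed):

```python
import json

from feast.protos.feast.core.ValidationProfile_pb2 import GEValidationProfile

# Stand-in for a real Great Expectations suite serialized to JSON.
suite = {"expectation_suite_name": "driver_stats_suite", "expectations": []}

profile = GEValidationProfile(expectation_suite=json.dumps(suite).encode("utf-8"))
```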
-  rpc GetOnlineFeaturesV2 (GetOnlineFeaturesRequestV2) returns (GetOnlineFeaturesResponse);
+  // Get online features synchronously.
+  rpc GetOnlineFeatures (GetOnlineFeaturesRequest) returns (GetOnlineFeaturesResponse);
 }
 
 message GetFeastServingInfoRequest {}
@@ -39,24 +37,17 @@ message GetFeastServingInfoRequest {}
 message GetFeastServingInfoResponse {
   // Feast version of this serving deployment.
   string version = 1;
-
-  // Type of serving deployment, either ONLINE or BATCH. Different store types support different
-  // feature retrieval methods.
-  FeastServingType type = 2;
-
-  // Note: Batch specific options start from 10.
-  // Staging location for this serving store, if any.
-  string job_staging_location = 10;
 }
 
 message FeatureReferenceV2 {
-  // Name of the Feature Table to retrieve the feature from.
-  string feature_table = 1;
+  // Name of the Feature View to retrieve the feature from.
+  string feature_view_name = 1;
 
   // Name of the Feature to retrieve the feature from.
-  string name = 2;
+  string feature_name = 2;
 }
 
+// ToDo (oleksii): remove this message (since it's not used) and move EntityRow to package level
 message GetOnlineFeaturesRequestV2 {
   // List of features that are being retrieved
   repeated FeatureReferenceV2 features = 4;
@@ -81,46 +72,62 @@ message GetOnlineFeaturesRequestV2 {
   }
 }
 
-message GetOnlineFeaturesResponse {
-  // Feature values retrieved from feast.
-  repeated FieldValues field_values = 1;
-
-  message FieldValues {
-    // Map of feature or entity name to feature/entity values.
-    // Timestamps are not returned in this response.
-    map<string, feast.types.Value> fields = 1;
-    // Map of feature or entity name to feature/entity statuses/metadata.
-    map<string, FieldStatus> statuses = 2;
+// In JSON "val" field can be omitted
+message FeatureList {
+  repeated string val = 1;
+}
+
+message GetOnlineFeaturesRequest {
+  oneof kind {
+    string feature_service = 1;
+    FeatureList features = 2;
   }
-
-  enum FieldStatus {
-    // Status is unset for this field.
-    INVALID = 0;
-
-    // Field value is present for this field and age is within max age.
-    PRESENT = 1;
-
-    // Values could be found for entity key and age is within max age, but
-    // this field value is assigned a value on ingestion into feast.
-    NULL_VALUE = 2;
-
-    // Entity key did not return any values as they do not exist in Feast.
-    // This could suggest that the feature values have not yet been ingested
-    // into feast or the ingestion failed.
-    NOT_FOUND = 3;
-
-    // Values could be found for entity key, but field values are outside the maximum
-    // allowable range.
-    OUTSIDE_MAX_AGE = 4;
+  // The entity data is specified in a columnar format
+  // A map of entity name -> list of values
+  map<string, feast.types.RepeatedValue> entities = 3;
+  bool full_feature_names = 4;
+
+  // Context for OnDemand Feature Transformation
+  // (was moved to dedicated parameter to avoid unnecessary separation logic on serving side)
+  // A map of variable name -> list of values
+  map<string, feast.types.RepeatedValue> request_context = 5;
+}
+
+message GetOnlineFeaturesResponse {
+  GetOnlineFeaturesResponseMetadata metadata = 1;
+
+  // Length of "results" array should match length of requested features.
+  // We also preserve the same order of features here as in metadata.feature_names
+  repeated FeatureVector results = 2;
+
+  message FeatureVector {
+    repeated feast.types.Value values = 1;
+    repeated FieldStatus statuses = 2;
+    repeated google.protobuf.Timestamp event_timestamps = 3;
+  }
 }
 
-enum FeastServingType {
-  FEAST_SERVING_TYPE_INVALID = 0;
-  // Online serving receives entity data directly and synchronously and will
-  // respond immediately.
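The replacement API above is columnar: one list of values per entity column, all columns of equal length, with features named either directly or via a feature service. A sketch (binding paths assumed):

```python
from feast.protos.feast.serving.ServingService_pb2 import (
    FeatureList,
    GetOnlineFeaturesRequest,
)
from feast.protos.feast.types.Value_pb2 import Value

request = GetOnlineFeaturesRequest(
    features=FeatureList(val=["driver_stats:acc_rate", "driver_stats:avg_daily_trips"]),
    full_feature_names=False,
)
# Columnar entities: two rows of the driver_id column.
request.entities["driver_id"].val.extend([Value(int64_val=1001), Value(int64_val=1002)])
```

In the response, results[i] lines up with metadata.feature_names.val[i], and each FeatureVector carries values, statuses, and event timestamps positionally.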
- FEAST_SERVING_TYPE_ONLINE = 1; - // Batch serving receives entity data asynchronously and orchestrates the - // retrieval through a staging location. - FEAST_SERVING_TYPE_BATCH = 2; +message GetOnlineFeaturesResponseMetadata { + FeatureList feature_names = 1; +} + +enum FieldStatus { + // Status is unset for this field. + INVALID = 0; + + // Field value is present for this field and age is within max age. + PRESENT = 1; + + // Values could be found for entity key and age is within max age, but + // this field value is assigned a value on ingestion into feast. + NULL_VALUE = 2; + + // Entity key did not return any values as they do not exist in Feast. + // This could suggest that the feature values have not yet been ingested + // into feast or the ingestion failed. + NOT_FOUND = 3; + + // Values could be found for entity key, but field values are outside the maximum + // allowable range. + OUTSIDE_MAX_AGE = 4; } diff --git a/protos/feast/serving/TransformationService.proto b/protos/feast/serving/TransformationService.proto new file mode 100644 index 0000000000..bd1a7917f3 --- /dev/null +++ b/protos/feast/serving/TransformationService.proto @@ -0,0 +1,67 @@ +/* + * Copyright 2021 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package feast.serving; + +option java_package = "feast.proto.serving"; +option java_outer_classname = "TransformationServiceAPIProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/serving"; + +service TransformationService { + rpc GetTransformationServiceInfo (GetTransformationServiceInfoRequest) returns (GetTransformationServiceInfoResponse); + + rpc TransformFeatures (TransformFeaturesRequest) returns (TransformFeaturesResponse); +} + +message ValueType { + oneof value { + // Having a oneOf provides forward compatibility if we need to support compound types + // that are not supported by arrow natively. + bytes arrow_value = 1; + } +} + +message GetTransformationServiceInfoRequest {} + +message GetTransformationServiceInfoResponse { + // Feast version of this transformation service deployment. + string version = 1; + + // Type of transformation service deployment. 
This is either Python, or custom + TransformationServiceType type = 2; + + string transformation_service_type_details = 3; +} + +message TransformFeaturesRequest { + string on_demand_feature_view_name = 1; + string project = 2; + + ValueType transformation_input = 3; +} + +message TransformFeaturesResponse { + ValueType transformation_output = 3; +} + +enum TransformationServiceType { + TRANSFORMATION_SERVICE_TYPE_INVALID = 0; + TRANSFORMATION_SERVICE_TYPE_PYTHON = 1; + + TRANSFORMATION_SERVICE_TYPE_CUSTOM = 100; +} diff --git a/protos/feast/storage/Redis.proto b/protos/feast/storage/Redis.proto index a662e352f4..c89e0b6b2f 100644 --- a/protos/feast/storage/Redis.proto +++ b/protos/feast/storage/Redis.proto @@ -16,14 +16,13 @@ syntax = "proto3"; -import "feast/types/Field.proto"; import "feast/types/Value.proto"; package feast.storage; option java_outer_classname = "RedisProto"; option java_package = "feast.proto.storage"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/storage"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/storage"; message RedisKeyV2 { string project = 1; diff --git a/protos/feast/types/EntityKey.proto b/protos/feast/types/EntityKey.proto index cbc3c55442..d7eebf25d0 100644 --- a/protos/feast/types/EntityKey.proto +++ b/protos/feast/types/EntityKey.proto @@ -22,7 +22,7 @@ package feast.types; option java_package = "feast.proto.types"; option java_outer_classname = "EntityKeyProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/types"; message EntityKey { repeated string join_keys = 1; diff --git a/protos/feast/types/Field.proto b/protos/feast/types/Field.proto index 3b8416c253..8349263cc6 100644 --- a/protos/feast/types/Field.proto +++ b/protos/feast/types/Field.proto @@ -22,9 +22,9 @@ package feast.types; option java_package = "feast.proto.types"; option java_outer_classname = "FieldProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/types"; message Field { string name = 1; - feast.types.Value value = 2; + feast.types.ValueType.Enum value = 2; } diff --git a/protos/feast/types/Value.proto b/protos/feast/types/Value.proto index 23d03e651b..b273fecfea 100644 --- a/protos/feast/types/Value.proto +++ b/protos/feast/types/Value.proto @@ -20,7 +20,7 @@ package feast.types; option java_package = "feast.proto.types"; option java_outer_classname = "ValueProto"; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/types"; message ValueType { enum Enum { @@ -41,12 +41,14 @@ message ValueType { FLOAT_LIST = 16; BOOL_LIST = 17; UNIX_TIMESTAMP_LIST = 18; + NULL = 19; } } message Value { // ValueType is referenced by the metadata types, FeatureInfo and EntityInfo. // The enum values do not have to match the oneof val field ids, but they should. 
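TransformFeaturesRequest, introduced just above, carries its payload as Arrow IPC bytes inside ValueType.arrow_value. One way to produce that payload with pyarrow (binding path and view name are assumptions):

```python
import pandas as pd
import pyarrow as pa

from feast.protos.feast.serving.TransformationService_pb2 import (
    TransformFeaturesRequest,
    ValueType,
)

df = pd.DataFrame({"fare": [6.0, 18.5], "trip_distance": [1.2, 5.4]})
table = pa.Table.from_pandas(df)

# Serialize the table to Arrow IPC stream bytes.
sink = pa.BufferOutputStream()
with pa.ipc.new_stream(sink, table.schema) as writer:
    writer.write_table(table)

request = TransformFeaturesRequest(
    on_demand_feature_view_name="fare_features",  # illustrative
    project="default",
    transformation_input=ValueType(arrow_value=sink.getvalue().to_pybytes()),
)
```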
+ // In JSON "*_val" field can be omitted oneof val { bytes bytes_val = 1; string string_val = 2; @@ -64,9 +66,14 @@ message Value { FloatList float_list_val = 16; BoolList bool_list_val = 17; Int64List unix_timestamp_list_val = 18; + Null null_val = 19; } } +enum Null { + NULL = 0; +} + message BytesList { repeated bytes val = 1; } @@ -94,3 +101,9 @@ message FloatList { message BoolList { repeated bool val = 1; } + +// This is to avoid an issue of being unable to specify `repeated value` in oneofs or maps +// In JSON "val" field can be omitted +message RepeatedValue { + repeated Value val = 1; +} \ No newline at end of file diff --git a/protos/tensorflow_metadata/proto/v0/path.proto b/protos/tensorflow_metadata/proto/v0/path.proto deleted file mode 100644 index 3a4e41bad9..0000000000 --- a/protos/tensorflow_metadata/proto/v0/path.proto +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2018 The TensorFlow Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// ============================================================================= - -syntax = "proto2"; -option cc_enable_arenas = true; - -package tensorflow.metadata.v0; - -option java_package = "org.tensorflow.metadata.v0"; -option java_multiple_files = true; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; - -// A path is a more general substitute for the name of a field or feature that -// can be used for flat examples as well as structured data. For example, if -// we had data in a protocol buffer: -// message Person { -// int age = 1; -// optional string gender = 2; -// repeated Person parent = 3; -// } -// Thus, here the path {step:["parent", "age"]} in statistics would refer to the -// age of a parent, and {step:["parent", "parent", "age"]} would refer to the -// age of a grandparent. This allows us to distinguish between the statistics -// of parents' ages and grandparents' ages. In general, repeated messages are -// to be preferred to linked lists of arbitrary length. -// For SequenceExample, if we have a feature list "foo", this is represented -// by {step:["##SEQUENCE##", "foo"]}. -message Path { - // Any string is a valid step. - // However, whenever possible have a step be [A-Za-z0-9_]+. - repeated string step = 1; -} diff --git a/protos/tensorflow_metadata/proto/v0/schema.proto b/protos/tensorflow_metadata/proto/v0/schema.proto deleted file mode 100644 index 00005ee913..0000000000 --- a/protos/tensorflow_metadata/proto/v0/schema.proto +++ /dev/null @@ -1,673 +0,0 @@ -// Copyright 2017 The TensorFlow Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
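The Value.proto additions at the top of this hunk (null_val, the Null enum, and RepeatedValue) make missing values and value lists first-class. A short sketch (binding path assumed):

```python
from feast.protos.feast.types.Value_pb2 import Null, RepeatedValue, Value

# A column of two rows where the second value is explicitly NULL.
column = RepeatedValue(val=[Value(double_val=3.5), Value(null_val=Null.NULL)])
assert column.val[1].WhichOneof("val") == "null_val"
```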
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// ============================================================================= - -syntax = "proto2"; - -package tensorflow.metadata.v0; - -import "google/protobuf/any.proto"; -import "tensorflow_metadata/proto/v0/path.proto"; - -option cc_enable_arenas = true; -option java_package = "org.tensorflow.metadata.v0"; -option java_multiple_files = true; -option go_package = "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; - -// LifecycleStage. Only UNKNOWN_STAGE, BETA, and PRODUCTION features are -// actually validated. -// PLANNED, ALPHA, and DEBUG are treated as DEPRECATED. -enum LifecycleStage { - UNKNOWN_STAGE = 0; // Unknown stage. - PLANNED = 1; // Planned feature, may not be created yet. - ALPHA = 2; // Prototype feature, not used in experiments yet. - BETA = 3; // Used in user-facing experiments. - PRODUCTION = 4; // Used in a significant fraction of user traffic. - DEPRECATED = 5; // No longer supported: do not use in new models. - DEBUG_ONLY = 6; // Only exists for debugging purposes. -} - -// -// Message to represent schema information. -// NextID: 14 -message Schema { - // Features described in this schema. - repeated Feature feature = 1; - - // Sparse features described in this schema. - repeated SparseFeature sparse_feature = 6; - - // Weighted features described in this schema. - repeated WeightedFeature weighted_feature = 12; - - // Use StructDomain instead. - // Sequences described in this schema. A sequence may be described in terms of - // several features. Any features appearing within a sequence must *not* be - // declared as top-level features in . -// GOOGLE-LEGACY repeated Sequence sequence = 2; - - // declared as top-level features in . - // String domains referenced in the features. - repeated StringDomain string_domain = 4; - - // top level float domains that can be reused by features - repeated FloatDomain float_domain = 9; - - // top level int domains that can be reused by features - repeated IntDomain int_domain = 10; - - // Default environments for each feature. - // An environment represents both a type of location (e.g. a server or phone) - // and a time (e.g. right before model X is run). In the standard scenario, - // 99% of the features should be in the default environments TRAINING, - // SERVING, and the LABEL (or labels) AND WEIGHT is only available at TRAINING - // (not at serving). - // Other possible variations: - // 1. There may be TRAINING_MOBILE, SERVING_MOBILE, TRAINING_SERVICE, - // and SERVING_SERVICE. - // 2. If one is ensembling three models, where the predictions of the first - // three models are available for the ensemble model, there may be - // TRAINING, SERVING_INITIAL, SERVING_ENSEMBLE. - // See FeatureProto::not_in_environment and FeatureProto::in_environment. - repeated string default_environment = 5; - - /* BEGIN GOOGLE-LEGACY - // TODO(b/73109633): Change default to false, before removing this field. - optional bool generate_legacy_feature_spec = 7 [default = true]; - END GOOGLE-LEGACY */ - - // Additional information about the schema as a whole. 
Features may also - // be annotated individually. - optional Annotation annotation = 8; - - // Dataset-level constraints. This is currently used for specifying - // information about changes in num_examples. - optional DatasetConstraints dataset_constraints = 11; - - // TensorRepresentation groups. The keys are the names of the groups. - // Key "" (empty string) denotes the "default" group, which is what should - // be used when a group name is not provided. - // See the documentation at TensorRepresentationGroup for more info. - // Under development. DO NOT USE. - map tensor_representation_group = 13; -} - -// Describes schema-level information about a specific feature. -// NextID: 31 -message Feature { - // The name of the feature. - optional string name = 1; // required - - // This field is no longer supported. Instead, use: - // lifecycle_stage: DEPRECATED - // TODO(b/111450258): remove this. - optional bool deprecated = 2 [deprecated = true]; - - // Comment field for a human readable description of the field. - // TODO(b/123518108): remove this. -// GOOGLE-LEGACY optional string comment = 3 [deprecated = true]; - - oneof presence_constraints { - // Constraints on the presence of this feature in the examples. - FeaturePresence presence = 14; - // Only used in the context of a "group" context, e.g., inside a sequence. - FeaturePresenceWithinGroup group_presence = 17; - } - - // The shape of the feature which governs the number of values that appear in - // each example. - oneof shape_type { - // The feature has a fixed shape corresponding to a multi-dimensional - // tensor. - FixedShape shape = 23; - // The feature doesn't have a well defined shape. All we know are limits on - // the minimum and maximum number of values. - ValueCount value_count = 5; - } - - // Physical type of the feature's values. - // Note that you can have: - // type: BYTES - // int_domain: { - // min: 0 - // max: 3 - // } - // This would be a field that is syntactically BYTES (i.e. strings), but - // semantically an int, i.e. it would be "0", "1", "2", or "3". - optional FeatureType type = 6; - - // Domain for the values of the feature. - oneof domain_info { - // Reference to a domain defined at the schema level. - string domain = 7; - // Inline definitions of domains. - IntDomain int_domain = 9; - FloatDomain float_domain = 10; - StringDomain string_domain = 11; - BoolDomain bool_domain = 13; - StructDomain struct_domain = 29; - // Supported semantic domains. - NaturalLanguageDomain natural_language_domain = 24; - ImageDomain image_domain = 25; - MIDDomain mid_domain = 26; - URLDomain url_domain = 27; - TimeDomain time_domain = 28; - TimeOfDayDomain time_of_day_domain = 30; - } - - // Constraints on the distribution of the feature values. - // Currently only supported for StringDomains. - // TODO(b/69473628): Extend functionality to other domain types. - optional DistributionConstraints distribution_constraints = 15; - - // Additional information about the feature for documentation purpose. - optional Annotation annotation = 16; - - // Tests comparing the distribution to the associated serving data. - optional FeatureComparator skew_comparator = 18; - - // Tests comparing the distribution between two consecutive spans (e.g. days). - optional FeatureComparator drift_comparator = 21; - - // List of environments this feature is present in. - // Should be disjoint from not_in_environment. 
- // This feature is in environment "foo" if: - // ("foo" is in in_environment or default_environments) AND - // "foo" is not in not_in_environment. - // See Schema::default_environments. - repeated string in_environment = 20; - - // List of environments this feature is not present in. - // Should be disjoint from of in_environment. - // See Schema::default_environments and in_environment. - repeated string not_in_environment = 19; - - // The lifecycle stage of a feature. It can also apply to its descendants. - // i.e., if a struct is DEPRECATED, its children are implicitly deprecated. - optional LifecycleStage lifecycle_stage = 22; -} - -// Additional information about the schema or about a feature. -message Annotation { - // Tags can be used to mark features. For example, tag on user_age feature can - // be `user_feature`, tag on user_country feature can be `location_feature`, - // `user_feature`. - repeated string tag = 1; - // Free-text comments. This can be used as a description of the feature, - // developer notes etc. - repeated string comment = 2; - // Application-specific metadata may be attached here. - repeated .google.protobuf.Any extra_metadata = 3; -} - -// Checks that the ratio of the current value to the previous value is not below -// the min_fraction_threshold or above the max_fraction_threshold. That is, -// previous value * min_fraction_threshold <= current value <= -// previous value * max_fraction_threshold. -// To specify that the value cannot change, set both min_fraction_threshold and -// max_fraction_threshold to 1.0. -message NumericValueComparator { - optional double min_fraction_threshold = 1; - optional double max_fraction_threshold = 2; -} - -// Constraints on the entire dataset. -message DatasetConstraints { - // Tests differences in number of examples between the current data and the - // previous span. - optional NumericValueComparator num_examples_drift_comparator = 1; - // Tests comparisions in number of examples between the current data and the - // previous version of that data. - optional NumericValueComparator num_examples_version_comparator = 2; - // Minimum number of examples in the dataset. - optional int64 min_examples_count = 3; -} - -// Specifies a fixed shape for the feature's values. The immediate implication -// is that each feature has a fixed number of values. Moreover, these values -// can be parsed in a multi-dimensional tensor using the specified axis sizes. -// The FixedShape defines a lexicographical ordering of the data. For instance, -// if there is a FixedShape { -// dim {size:3} dim {size:2} -// } -// then tensor[0][0]=field[0] -// then tensor[0][1]=field[1] -// then tensor[1][0]=field[2] -// then tensor[1][1]=field[3] -// then tensor[2][0]=field[4] -// then tensor[2][1]=field[5] -// -// The FixedShape message is identical with the TensorFlow TensorShape proto -// message. -message FixedShape { - // The dimensions that define the shape. The total number of values in each - // example is the product of sizes of each dimension. - repeated Dim dim = 2; - - // An axis in a multi-dimensional feature representation. - message Dim { - optional int64 size = 1; - - // Optional name of the tensor dimension. - optional string name = 2; - } -} - -// Limits on maximum and minimum number of values in a -// single example (when the feature is present). Use this when the minimum -// value count can be different than the maximum value count. Otherwise prefer -// FixedShape. 
-message ValueCount { - optional int64 min = 1; - optional int64 max = 2; -} - -/* BEGIN GOOGLE-LEGACY -// Constraint on the number of elements in a sequence. -message LengthConstraint { - optional int64 min = 1; - optional int64 max = 2; -} - -// A sequence is a logical feature that comprises several "raw" features that -// encode values at different "steps" within the sequence. -// TODO(b/110490010): Delete this. This is a special case of StructDomain. -message Sequence { - // An optional name for this sequence. Used mostly for debugging and - // presentation. - optional string name = 1; - - // Features that comprise the sequence. These features are "zipped" together - // to form the values for the sequence at different steps. - // - Use group_presence within each feature to encode presence constraints - // within the sequence. - // - If all features have the same value-count constraints then - // declare this once using the shape_constraint below. - repeated Feature feature = 2; - - // Constraints on the presence of the sequence across all examples in the - // dataset. The sequence is assumed to be present if at least one of its - // features is present. - optional FeaturePresence presence = 3; - - // Shape constraints that apply on all the features that comprise the - // sequence. If this is set then the value_count in 'feature' is - // ignored. - // TODO(martinz): delete: there is no reason to believe the shape of the - // fields in a sequence will be the same. Use the fields in Feature instead. - oneof shape_constraint { - ValueCount value_count = 4; - FixedShape fixed_shape = 5; - } - - // Constraint on the number of elements in a sequence. - optional LengthConstraint length_constraint = 6; -} -END GOOGLE-LEGACY */ - -// Represents a weighted feature that is encoded as a combination of raw base -// features. The `weight_feature` should be a float feature with identical -// shape as the `feature`. This is useful for representing weights associated -// with categorical tokens (e.g. a TFIDF weight associated with each token). -// TODO(b/142122960): Handle WeightedCategorical end to end in TFX (validation, -// TFX Unit Testing, etc) -message WeightedFeature { - // Name for the weighted feature. This should not clash with other features in - // the same schema. - optional string name = 1; // required - // Path of a base feature to be weighted. Required. - optional Path feature = 2; - // Path of weight feature to associate with the base feature. Must be same - // shape as feature. Required. - optional Path weight_feature = 3; - // The lifecycle_stage determines where a feature is expected to be used, - // and therefore how important issues with it are. - optional LifecycleStage lifecycle_stage = 4; -} - -// A sparse feature represents a sparse tensor that is encoded with a -// combination of raw features, namely index features and a value feature. Each -// index feature defines a list of indices in a different dimension. -message SparseFeature { - reserved 11; - // Name for the sparse feature. This should not clash with other features in - // the same schema. - optional string name = 1; // required - - // This field is no longer supported. Instead, use: - // lifecycle_stage: DEPRECATED - // TODO(b/111450258): remove this. - optional bool deprecated = 2 [deprecated = true]; - - // The lifecycle_stage determines where a feature is expected to be used, - // and therefore how important issues with it are. 
-  optional LifecycleStage lifecycle_stage = 7;
-
-  // Comment field for a human readable description of the field.
-  // TODO(martinz): delete, convert to annotation.
-// GOOGLE-LEGACY  optional string comment = 3 [deprecated = true];
-
-  // Constraints on the presence of this feature in examples.
-  // Deprecated, this is inferred by the referred features.
-  optional FeaturePresence presence = 4 [deprecated = true];
-
-  // Shape of the sparse tensor that this SparseFeature represents.
-  // Currently not supported.
-  // TODO(b/109669962): Consider deriving this from the referred features.
-  optional FixedShape dense_shape = 5;
-
-  // Features that represent indexes. Should be integers >= 0.
-  repeated IndexFeature index_feature = 6; // at least one
-  message IndexFeature {
-    // Name of the index-feature. This should be a reference to an existing
-    // feature in the schema.
-    optional string name = 1;
-  }
-
-  // If true then the index values are already sorted lexicographically.
-  optional bool is_sorted = 8;
-
-  optional ValueFeature value_feature = 9; // required
-  message ValueFeature {
-    // Name of the value-feature. This should be a reference to an existing
-    // feature in the schema.
-    optional string name = 1;
-  }
-
-  // Type of value feature.
-  // Deprecated, this is inferred by the referred features.
-  optional FeatureType type = 10 [deprecated = true];
-}
-
-// Models constraints on the distribution of a feature's values.
-// TODO(martinz): replace min_domain_mass with max_off_domain (but slowly).
-message DistributionConstraints {
-  // The minimum fraction (in [0,1]) of values across all examples that
-  // should come from the feature's domain, e.g.:
-  //   1.0 => All values must come from the domain.
-  //   .9  => At least 90% of the values must come from the domain.
-  optional double min_domain_mass = 1 [default = 1.0];
-}
-
-// Encodes information for domains of integer values.
-// Note that FeatureType could be either INT or BYTES.
-message IntDomain {
-  // Id of the domain. Required if the domain is defined at the schema level. If
-  // so, then the name must be unique within the schema.
-  optional string name = 1;
-
-  // Min and max values for the domain.
-  optional int64 min = 3;
-  optional int64 max = 4;
-
-  // If true then the domain encodes categorical values (i.e., ids) rather than
-  // ordinal values.
-  optional bool is_categorical = 5;
-}
-
-// Encodes information for domains of float values.
-// Note that FeatureType could be either FLOAT or BYTES.
-message FloatDomain {
-  // Id of the domain. Required if the domain is defined at the schema level. If
-  // so, then the name must be unique within the schema.
-  optional string name = 1;
-
-  // Min and max values of the domain.
-  optional float min = 3;
-  optional float max = 4;
-}
-
-// Domain for a recursive struct.
-// NOTE: If a feature with a StructDomain is deprecated, then all the
-// child features (features and sparse_features of the StructDomain) are also
-// considered to be deprecated. Similarly child features can only be in
-// environments of the parent feature.
-message StructDomain {
-  repeated Feature feature = 1;
-
-  repeated SparseFeature sparse_feature = 2;
-}
-
-// Encodes information for domains of string values.
-message StringDomain {
-  // Id of the domain. Required if the domain is defined at the schema level. If
-  // so, then the name must be unique within the schema.
-  optional string name = 1;
-
-  // The values appearing in the domain.
-  repeated string value = 2;
-}
-
-// Encodes information about the domain of a boolean attribute that encodes its
-// TRUE/FALSE values as strings, or 0=false, 1=true.
-// Note that FeatureType could be either INT or BYTES.
-message BoolDomain {
-  // Id of the domain. Required if the domain is defined at the schema level. If
-  // so, then the name must be unique within the schema.
-  optional string name = 1;
-
-  // String values for TRUE/FALSE.
-  optional string true_value = 2;
-  optional string false_value = 3;
-}
-
-// BEGIN SEMANTIC-TYPES-PROTOS
-// Semantic domains are specialized feature domains. For example a string
-// Feature might represent a Time of a specific format.
-// Semantic domains are defined as protocol buffers to allow further sub-types /
-// specialization, e.g: NaturalLanguageDomain can provide information on the
-// language of the text.
-
-// Natural language text.
-message NaturalLanguageDomain {}
-
-// Image data.
-message ImageDomain {}
-
-// Knowledge graph ID, see: https://www.wikidata.org/wiki/Property:P646
-message MIDDomain {}
-
-// A URL, see: https://en.wikipedia.org/wiki/URL
-message URLDomain {}
-
-// Time or date representation.
-message TimeDomain {
-  enum IntegerTimeFormat {
-    FORMAT_UNKNOWN = 0;
-    UNIX_DAYS = 5; // Number of days since 1970-01-01.
-    UNIX_SECONDS = 1;
-    UNIX_MILLISECONDS = 2;
-    UNIX_MICROSECONDS = 3;
-    UNIX_NANOSECONDS = 4;
-  }
-
-  oneof format {
-    // Expected format that contains a combination of regular characters and
-    // special format specifiers. Format specifiers are a subset of the
-    // strptime standard.
-    string string_format = 1;
-
-    // Expected format of integer times.
-    IntegerTimeFormat integer_format = 2;
-  }
-}
-
-// Time of day, without a particular date.
-message TimeOfDayDomain {
-  enum IntegerTimeOfDayFormat {
-    FORMAT_UNKNOWN = 0;
-    // Time values, containing hour/minute/second/nanos, encoded into 8-byte
-    // bit fields following the ZetaSQL convention:
-    //        6         5         4         3         2         1
-    // MSB 3210987654321098765432109876543210987654321098765432109876543210 LSB
-    //                      | H ||  M ||  S ||---------- nanos -----------|
-    PACKED_64_NANOS = 1;
-  }
-
-  oneof format {
-    // Expected format that contains a combination of regular characters and
-    // special format specifiers. Format specifiers are a subset of the
-    // strptime standard.
-    string string_format = 1;
-
-    // Expected format of integer times.
-    IntegerTimeOfDayFormat integer_format = 2;
-  }
-}
-// END SEMANTIC-TYPES-PROTOS
-
-// Describes the physical representation of a feature.
-// It may be different than the logical representation, which
-// is represented as a Domain.
-enum FeatureType {
-  TYPE_UNKNOWN = 0;
-  BYTES = 1;
-  INT = 2;
-  FLOAT = 3;
-  STRUCT = 4;
-}
-
-// Describes constraints on the presence of the feature in the data.
-message FeaturePresence {
-  // Minimum fraction of examples that have this feature.
-  optional double min_fraction = 1;
-  // Minimum number of examples that have this feature.
-  optional int64 min_count = 2;
-}
-
-// Records constraints on the presence of a feature inside a "group" context
-// (e.g., .presence inside a group of features that define a sequence).
-message FeaturePresenceWithinGroup {
-  optional bool required = 1;
-}
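To make the PACKED_64_NANOS bit layout above concrete, here is a minimal Go sketch; the field offsets (nanos in bits 0-29, seconds in 30-35, minutes in 36-41, hours from bit 42 up) are read off the bit diagram and the ZetaSQL convention it cites, and the helper name is ours:

```{go}
// packTimeOfDay encodes a time of day into a single int64 following the
// PACKED_64_NANOS convention: | H || M || S || nanos |.
func packTimeOfDay(hour, minute, second, nanos int64) int64 {
	return hour<<42 | minute<<36 | second<<30 | nanos
}
```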
-// Checks that the L-infinity norm is below a certain threshold between the
-// two discrete distributions. Since this is applied to a FeatureNameStatistics,
-// it only considers the top k.
-// L_infty(p,q) = max_i |p_i-q_i|
-message InfinityNorm {
-  // The InfinityNorm is in the interval [0.0, 1.0] so sensible bounds should
-  // be in the interval [0.0, 1.0).
-  optional double threshold = 1;
-}
-
-message FeatureComparator {
-  optional InfinityNorm infinity_norm = 1;
-}
-
-// A TensorRepresentation captures the intent for converting columns in a
-// dataset to TensorFlow Tensors (or more generally, tf.CompositeTensors).
-// Note that one tf.CompositeTensor may consist of data from multiple columns,
-// for example, a N-dimensional tf.SparseTensor may need N + 1 columns to
-// provide the sparse indices and values.
-// Note that the "column name" that a TensorRepresentation needs is a
-// string, not a Path -- it means that the column name identifies a top-level
-// Feature in the schema (i.e. you cannot specify a Feature nested in a STRUCT
-// Feature).
-message TensorRepresentation {
-  message DefaultValue {
-    oneof kind {
-      double float_value = 1;
-      // Note that the data column might be of a shorter integral type. It's the
-      // user's responsibility to make sure the default value fits that type.
-      int64 int_value = 2;
-      bytes bytes_value = 3;
-      // uint_value should only be used if the default value can't fit in an
-      // int64 (`int_value`).
-      uint64 uint_value = 4;
-    }
-  }
-
-  // A tf.Tensor
-  message DenseTensor {
-    // Identifies the column in the dataset that provides the values of this
-    // Tensor.
-    optional string column_name = 1;
-    // The shape of each row of the data (i.e. does not include the batch
-    // dimension)
-    optional FixedShape shape = 2;
-    // If this column is missing values in a row, the default_value will be
-    // used to fill that row.
-    optional DefaultValue default_value = 3;
-  }
-
-  // A ragged tf.SparseTensor that models nested lists.
-  message VarLenSparseTensor {
-    // Identifies the column in the dataset that should be converted to the
-    // VarLenSparseTensor.
-    optional string column_name = 1;
-  }
-
-  // A tf.SparseTensor whose indices and values come from separate data columns.
-  // This will replace Schema.sparse_feature eventually.
-  // The index columns must be of INT type, and all the columns must co-occur
-  // and have the same valency at the same row.
-  message SparseTensor {
-    // The dense shape of the resulting SparseTensor (does not include the batch
-    // dimension).
-    optional FixedShape dense_shape = 1;
-    // The columns constitute the coordinates of the values.
-    // indices_column[i][j] contains the coordinate of the i-th dimension of the
-    // j-th value.
-    repeated string index_column_names = 2;
-    // The column that contains the values.
-    optional string value_column_name = 3;
-  }
-
-  oneof kind {
-    DenseTensor dense_tensor = 1;
-    VarLenSparseTensor varlen_sparse_tensor = 2;
-    SparseTensor sparse_tensor = 3;
-  }
-}
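For reference, the metric behind the InfinityNorm message defined earlier in this file is simple to state in code. A minimal Go sketch, assuming both distributions are given as value-to-probability maps over the same top-k values (the helper name is ours):

```{go}
// lInfinity returns max_i |p_i - q_i| over the union of keys; a key
// missing from one distribution contributes probability 0.
func lInfinity(p, q map[string]float64) float64 {
	max := 0.0
	for k, pv := range p {
		if d := pv - q[k]; d > max {
			max = d
		} else if -d > max {
			max = -d
		}
	}
	for k, qv := range q {
		if _, ok := p[k]; !ok && qv > max {
			max = qv
		}
	}
	return max
}
```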
-// A TensorRepresentationGroup is a collection of TensorRepresentations with
-// names. These names may serve as identifiers when converting the dataset
-// to a collection of Tensors or tf.CompositeTensors.
-// For example, given the following group:
-// {
-//   key: "dense_tensor"
-//   tensor_representation {
-//     dense_tensor {
-//       column_name: "univalent_feature"
-//       shape {
-//         dim {
-//           size: 1
-//         }
-//       }
-//       default_value {
-//         float_value: 0
-//       }
-//     }
-//   }
-// }
-// {
-//   key: "varlen_sparse_tensor"
-//   tensor_representation {
-//     varlen_sparse_tensor {
-//       column_name: "multivalent_feature"
-//     }
-//   }
-// }
-//
-// Then the schema is expected to have feature "univalent_feature" and
-// "multivalent_feature", and when a batch of data is converted to Tensors using
-// this TensorRepresentationGroup, the result may be the following dict:
-// {
-//   "dense_tensor": tf.Tensor(...),
-//   "varlen_sparse_tensor": tf.SparseTensor(...),
-// }
-message TensorRepresentationGroup {
-  map<string, TensorRepresentation> tensor_representation = 1;
-}
diff --git a/protos/tensorflow_metadata/proto/v0/statistics.proto b/protos/tensorflow_metadata/proto/v0/statistics.proto
deleted file mode 100644
index 3123dad874..0000000000
--- a/protos/tensorflow_metadata/proto/v0/statistics.proto
+++ /dev/null
@@ -1,427 +0,0 @@
-// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// =============================================================================
-
-// Definitions for aggregated feature statistics for datasets.
-// TODO(b/80075690): make a Javascript build rule for this.
-// TODO(b/80075691): migrate Facets to use this.
-syntax = "proto3";
-option cc_enable_arenas = true;
-
-package tensorflow.metadata.v0;
-
-option java_package = "org.tensorflow.metadata.v0";
-option java_multiple_files = true;
-option go_package = "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0";
-
-import "tensorflow_metadata/proto/v0/path.proto";
-
-// Copied from Facets feature_statistics.proto
-// Must be kept binary-compatible with the original, until all usages
-// are updated to use this version, or we write a proto-to-proto converter.
-
-// A list of feature statistics for different datasets. If you wish to compare
-// different datasets using this list, then the DatasetFeatureStatistics
-// entries should all contain the same list of features.
-message DatasetFeatureStatisticsList {
-  repeated DatasetFeatureStatistics datasets = 1;
-}
-
-// The feature statistics for a single dataset.
-message DatasetFeatureStatistics {
-  // The name of the dataset.
-  string name = 1;
-  // The number of examples in the dataset.
-  uint64 num_examples = 2;
-
-  // Only valid if the weight feature was specified.
-  // Treats a missing weighted feature as zero.
-  double weighted_num_examples = 4;
-  // The feature statistics for the dataset.
-  repeated FeatureNameStatistics features = 3;
-
-  // Cross feature statistics for the dataset.
-  repeated CrossFeatureStatistics cross_features = 5;
-}
-
-message CrossFeatureStatistics {
-  // The path of feature x.
-  Path path_x = 1;
-  // The path of feature y.
-  Path path_y = 2;
-  // Number of occurrences of this feature cross in the data. If any of
-  // the features in the cross is missing, the example is ignored.
-  uint64 count = 3;
-
-  oneof cross_stats {
-    NumericCrossStatistics num_cross_stats = 4;
-    CategoricalCrossStatistics categorical_cross_stats = 5;
-  }
-}
-
-message NumericCrossStatistics {
-  // Pearson product-moment correlation coefficient.
-  float correlation = 1;
-  // Standard covariance. E[(X-E[X])*(Y-E[Y])]
-  float covariance = 2;
-}
-
-message CategoricalCrossStatistics {
-  LiftStatistics lift = 1;
-}
-
-message LiftStatistics {
-  // Lift information for each value of path_y. Lift is defined for each pair of
-  // values (x,y) as P(path_y=y|path_x=x)/P(path_y=y).
-  repeated LiftSeries lift_series = 1;
-  // Weighted lift information for each value of path_y. Weighted lift is
-  // defined for each pair of values (x,y) as P(path_y=y|path_x=x)/P(path_y=y)
-  // where probabilities are computed over weighted example space.
-  repeated LiftSeries weighted_lift_series = 2;
-}
-
-// Container for lift information for a specific y-value.
-message LiftSeries {
-  // A bucket for referring to binned numeric features.
-  message Bucket {
-    // The low value of the bucket, inclusive.
-    double low_value = 1;
-    // The high value of the bucket, exclusive (unless the high_value is
-    // positive infinity).
-    double high_value = 2;
-  }
-
-  // The particular value of path_y corresponding to this LiftSeries. Each
-  // element in lift_values corresponds to the lift of a different x_value and
-  // this specific y_value.
-  oneof y_value {
-    int32 y_int = 1;
-    string y_string = 2;
-    Bucket y_bucket = 3;
-  }
-
-  // The number of examples in which y_value appears.
-  oneof y_count_value {
-    uint64 y_count = 4;
-    double weighted_y_count = 5;
-  }
-
-  // A container for lift information about a specific value of path_x.
-  message LiftValue {
-    oneof x_value {
-      int32 x_int = 1;
-      string x_string = 2;
-    }
-    // P(path_y=y|path_x=x) / P(path_y=y) for x_value and the enclosing y_value.
-    // In terms of concrete fields, this number represents:
-    //   (x_and_y_count / x_count) / (y_count / num_examples)
-    double lift = 3;
-    // The number of examples in which x_value appears.
-    oneof x_count_value {
-      uint64 x_count = 4;
-      double weighted_x_count = 5;
-    }
-    // The number of examples in which x_value appears and y_value appears.
-    oneof x_and_y_count_value {
-      uint64 x_and_y_count = 6;
-      double weighted_x_and_y_count = 7;
-    }
-  }
-
-  // The lifts for each path_x value and this y_value.
-  repeated LiftValue lift_values = 6;
-}
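Reading the lift definition above off the raw counts: with x_and_y_count=30, x_count=100, y_count=200 and num_examples=1000, lift = (30/100)/(200/1000) = 1.5, i.e. observing x makes y 1.5 times more likely. A minimal Go sketch (the helper name is ours):

```{go}
// lift computes P(path_y=y|path_x=x) / P(path_y=y) from the counts
// carried by LiftValue and its enclosing LiftSeries:
//   (x_and_y_count / x_count) / (y_count / num_examples)
func lift(xAndYCount, xCount, yCount, numExamples float64) float64 {
	return (xAndYCount / xCount) / (yCount / numExamples)
}
```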
-// The complete set of statistics for a given feature name for a dataset.
-message FeatureNameStatistics {
-  // The types supported by the feature statistics. When aggregating
-  // tf.Examples, if the bytelist contains a string, it is recommended to encode
-  // it here as STRING instead of BYTES in order to calculate string-specific
-  // statistical measures.
-  enum Type {
-    INT = 0;
-    FLOAT = 1;
-    STRING = 2;
-    BYTES = 3;
-    STRUCT = 4;
-  }
-
-  // One can identify a field either by the name (for simple fields), or by
-  // a path (for structured fields). Note that:
-  //   name: "foo"
-  // is equivalent to:
-  //   path: {step:"foo"}
-  // Note: this oneof must be consistently either name or path across all
-  // FeatureNameStatistics in one DatasetFeatureStatistics.
-  oneof field_id {
-    // The feature name
-    string name = 1;
-
-    // The path of the feature.
-    Path path = 8;
-  }
-
-  // The data type of the feature
-  Type type = 2;
-
-  // The statistics of the values of the feature.
-  oneof stats {
-    NumericStatistics num_stats = 3;
-    StringStatistics string_stats = 4;
-    BytesStatistics bytes_stats = 5;
-    StructStatistics struct_stats = 7;
-  }
-
-  // Any custom statistics can be stored in this list.
-  repeated CustomStatistic custom_stats = 6;
-}
-
-// Common weighted statistics for all feature types. Statistics counting number
-// of values (i.e., avg_num_values and tot_num_values) include NaNs.
-// If the weighted column is missing, then this counts as a weight of 1
-// for that example.
-message WeightedCommonStatistics {
-  // Weighted number of examples not missing.
-  double num_non_missing = 1;
-  // Weighted number of examples missing.
-  // Note that if the weighted column is zero, this does not count
-  // as missing.
-  double num_missing = 2;
-  // Average number of values, weighted by the number of examples.
-  double avg_num_values = 3;
-  // tot_num_values = avg_num_values * num_non_missing.
-  // This is calculated directly, so should have less numerical error.
-  double tot_num_values = 4;
-}
-
-// Stores the name and value of any custom statistic. The value can be a string,
-// double, or histogram.
-message CustomStatistic {
-  string name = 1;
-  oneof val {
-    double num = 2;
-    string str = 3;
-    Histogram histogram = 4;
-    RankHistogram rank_histogram = 5;
-  }
-}
-
-// Statistics for a numeric feature in a dataset.
-message NumericStatistics {
-  CommonStatistics common_stats = 1;
-  // The mean of the values
-  double mean = 2;
-  // The standard deviation of the values
-  double std_dev = 3;
-  // The number of values that equal 0
-  uint64 num_zeros = 4;
-  // The minimum value
-  double min = 5;
-  // The median value
-  double median = 6;
-  // The maximum value
-  double max = 7;
-  // The histogram(s) of the feature values.
-  repeated Histogram histograms = 8;
-
-  // Weighted statistics for the feature, if the values have weights.
-  WeightedNumericStatistics weighted_numeric_stats = 9;
-}
-
-// Statistics for a string feature in a dataset.
-message StringStatistics {
-  CommonStatistics common_stats = 1;
-  // The number of unique values
-  uint64 unique = 2;
-
-  message FreqAndValue {
-    string value = 2;
-
-    // The number of times the value occurs. Stored as a double to be able to
-    // handle weighted features.
-    double frequency = 3;
-
-    // Deleted fields.
-    reserved 1;
-  }
-  // A sorted list of the most-frequent values and their frequencies, with
-  // the most-frequent being first.
-  repeated FreqAndValue top_values = 3;
-
-  // The average length of the values
-  float avg_length = 4;
-
-  // The rank histogram for the values of the feature.
-  // The rank is used to measure how commonly the value is found in the
-  // dataset. The most common value would have a rank of 1, with the second-most
-  // common value having a rank of 2, and so on.
-  RankHistogram rank_histogram = 5;
-
-  // Weighted statistics for the feature, if the values have weights.
-  WeightedStringStatistics weighted_string_stats = 6;
-
-  // A vocabulary file, used for vocabularies too large to store in the proto
-  // itself. Note that the file may be relative to some context-dependent
-  // directory. E.g. in TFX the feature statistics will live in a PPP and
-  // vocabulary file names will be relative to this PPP.
-  string vocabulary_file = 7;
-}
-
-// Statistics for a weighted numeric feature in a dataset.
-message WeightedNumericStatistics {
-  // The weighted mean of the values
-  double mean = 1;
-  // The weighted standard deviation of the values
-  double std_dev = 2;
-  // The weighted median of the values
-  double median = 3;
-
-  // The histogram(s) of the weighted feature values.
-  repeated Histogram histograms = 4;
-}
-
-// Statistics for a weighted string feature in a dataset.
-message WeightedStringStatistics {
-  // A sorted list of the most-frequent values and their weighted frequencies,
-  // with the most-frequent being first.
-  repeated StringStatistics.FreqAndValue top_values = 1;
-
-  // The rank histogram for the weighted values of the feature.
-  RankHistogram rank_histogram = 2;
-}
-
-// Statistics for a bytes feature in a dataset.
-message BytesStatistics {
-  CommonStatistics common_stats = 1;
-  // The number of unique values
-  uint64 unique = 2;
-
-  // The average number of bytes in a value
-  float avg_num_bytes = 3;
-  // The minimum number of bytes in a value
-  float min_num_bytes = 4;
-  // The maximum number of bytes in a value
-  float max_num_bytes = 5;
-}
-
-message StructStatistics {
-  CommonStatistics common_stats = 1;
-}
-
-// Common statistics for all feature types. Statistics counting number of values
-// (i.e., min_num_values, max_num_values, avg_num_values, and tot_num_values)
-// include NaNs.
-message CommonStatistics {
-  // The number of examples with at least one value for this feature.
-  uint64 num_non_missing = 1;
-  // The number of examples with no values for this feature.
-  uint64 num_missing = 2;
-  // The minimum number of values in a single example for this feature.
-  uint64 min_num_values = 3;
-  // The maximum number of values in a single example for this feature.
-  uint64 max_num_values = 4;
-  // The average number of values in a single example for this feature.
-  float avg_num_values = 5;
-  // tot_num_values = avg_num_values * num_non_missing.
-  // This is calculated directly, so should have less numerical error.
-  uint64 tot_num_values = 8;
-  // The quantiles histogram for the number of values in this feature.
-  Histogram num_values_histogram = 6;
-  WeightedCommonStatistics weighted_common_stats = 7;
-  // The histogram for the number of features in the feature list (only set if
-  // this feature is a non-context feature from a tf.SequenceExample).
-  // This is different from num_values_histogram, as num_values_histogram tracks
-  // the count of all values for a feature in an example, whereas this tracks
-  // the length of the feature list for this feature in an example (where each
-  // feature list can contain multiple values).
-  Histogram feature_list_length_histogram = 9;
-}
-
-// The data used to create a histogram of a numeric feature for a dataset.
-message Histogram {
-  // Each bucket defines its low and high values along with its count. The
-  // low and high values must be real numbers or positive or negative
-  // infinity. They cannot be NaN or undefined. Counts of those special values
-  // can be found in the num_nan and num_undefined fields.
-  message Bucket {
-    // The low value of the bucket, inclusive.
-    double low_value = 1;
-    // The high value of the bucket, exclusive (unless the high_value is
-    // positive infinity).
-    double high_value = 2;
-
-    // The number of items in the bucket. Stored as a double to be able to
-    // handle weighted histograms.
-    double sample_count = 4;
-
-    // Deleted fields.
-    reserved 3;
-  }
-
-  // The number of NaN values in the dataset.
-  uint64 num_nan = 1;
-  // The number of undefined values in the dataset.
-  uint64 num_undefined = 2;
-
-  // A list of buckets in the histogram, sorted from lowest bucket to highest
-  // bucket.
-  repeated Bucket buckets = 3;
-
-  // The type of the histogram. A standard histogram has equal-width buckets.
-  // The quantiles type is used when the histogram message is used to store
-  // quantile information (by using equal-count buckets with variable widths).
-  enum HistogramType {
-    STANDARD = 0;
-    QUANTILES = 1;
-  }
-
-  // The type of the histogram.
-  HistogramType type = 4;
-
-  // An optional descriptive name of the histogram, to be used for labeling.
-  string name = 5;
-}
-
-// The data used to create a rank histogram of a non-numeric feature of a
-// dataset. The rank of a value in a feature can be used as a measure of how
-// commonly the value is found in the entire dataset. With bucket sizes of one,
-// this becomes a distribution function of all feature values.
-message RankHistogram {
-  // Each bucket defines its start and end ranks along with its count.
-  message Bucket {
-    // The low rank of the bucket, inclusive.
-    uint64 low_rank = 1;
-    // The high rank of the bucket, exclusive.
-    uint64 high_rank = 2;
-
-    // The label for the bucket. Can be used to list or summarize the values in
-    // this rank bucket.
-    string label = 4;
-
-    // The number of items in the bucket. Stored as a double to be able to
-    // handle weighted histograms.
-    double sample_count = 5;
-
-    // Deleted fields.
-    reserved 3;
-  }
-
-  // A list of buckets in the histogram, sorted from lowest-ranked bucket to
-  // highest-ranked bucket.
-  repeated Bucket buckets = 1;
-
-  // An optional descriptive name of the histogram, to be used for labeling.
-  string name = 2;
-}
\ No newline at end of file
diff --git a/sdk/go/README.md b/sdk/go/README.md
deleted file mode 100644
index 79211df4a5..0000000000
--- a/sdk/go/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Feast Golang SDK
-
-The Feast Golang SDK currently only supports retrieval from online stores.
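(A typical first step, assuming Go modules, is `go get github.com/feast-dev/feast/sdk/go`; the module path comes from the SDK's go.mod further below.)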
-
-## Quickstart
-```{go}
-import (
-	"context"
-	feast "github.com/feast-dev/feast/sdk/go"
-)
-
-func main() {
-	cli, err := feast.NewGrpcClient("localhost", 6565)
-	if err != nil {
-		panic(err)
-	}
-
-	ctx := context.Background()
-	req := feast.OnlineFeaturesRequest{
-		Features: []string{"my_project_1/feature1", "my_project_2/feature1", "my_project_4/feature3", "feature2", "feature2"},
-		Entities: []feast.Row{
-			{"entity1": feast.Int64Val(1), "entity2": feast.StrVal("bob")},
-			{"entity1": feast.Int64Val(1), "entity2": feast.StrVal("annie")},
-			{"entity1": feast.Int64Val(1), "entity2": feast.StrVal("jane")},
-		},
-		Project: "my_project_3",
-	}
-
-	resp, err := cli.GetOnlineFeatures(ctx, &req)
-	if err != nil {
-		panic(err)
-	}
-
-	// Rows() returns a list of rows (map[string]featureValue).
-	out := resp.Rows()
-	_ = out // the rows would be consumed here
-}
-
-```
-
-If all features retrieved are of a single type, Feast provides convenience functions to retrieve your features as a vector of feature values:
-```{go}
-arr, err := resp.Int64Arrays(
-	[]string{"my_project_1/feature1",
-		"my_project_2/feature1",
-		"my_project_4/feature3",
-		"feature2",
-		"feature2"}, // order of features
-	[]int64{1, 2, 3, 4, 5}) // fillNa values
-```
diff --git a/sdk/go/auth.go b/sdk/go/auth.go
deleted file mode 100644
index 1b6703dae1..0000000000
--- a/sdk/go/auth.go
+++ /dev/null
@@ -1,136 +0,0 @@
-package feast
-
-import (
-	"bytes"
-	"context"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-
-	"golang.org/x/oauth2"
-	"golang.org/x/oauth2/google"
-	"google.golang.org/api/idtoken"
-)
-
-// Credential provides OIDC ID tokens used when authenticating with Feast.
-// Implements credentials.PerRPCCredentials.
-type Credential struct {
-	tokenSrc oauth2.TokenSource
-}
-
-// GetRequestMetadata attaches an OIDC token as metadata, refreshing tokens if required.
-// This is called by gRPC to authenticate each request.
-func (provider *Credential) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) {
-	token, err := provider.tokenSrc.Token()
-	if err != nil {
-		// Propagate the error instead of silently sending an unauthenticated request.
-		return nil, err
-	}
-	return map[string]string{
-		"Authorization": "Bearer " + token.AccessToken,
-	}, nil
-}
-
-// RequireTransportSecurity disables the transport security requirement,
-// allowing the user to configure it explicitly instead.
-func (provider *Credential) RequireTransportSecurity() bool {
-	return false
-}
-
-// NewStaticCredential creates a static authentication provider that supplies a fixed token.
-func NewStaticCredential(token string) *Credential {
-	return &Credential{tokenSrc: oauth2.StaticTokenSource(
-		&oauth2.Token{
-			AccessToken: token,
-		}),
-	}
-}
-
-func newGoogleCredential(
-	audience string,
-	findDefaultCredentials func(ctx context.Context, scopes ...string) (*google.Credentials, error),
-	makeTokenSource func(ctx context.Context, audience string, opts ...idtoken.ClientOption) (oauth2.TokenSource, error)) (*Credential, error) {
-	// Attempt to obtain a Google ID token from Google Application Default Credentials.
-	ctx := context.Background()
-	creds, err := findDefaultCredentials(ctx, "openid", "email")
-	if err != nil {
-		return nil, err
-	}
-	tokenSrc, err := makeTokenSource(ctx, audience, idtoken.WithCredentialsJSON(creds.JSON))
-	if err != nil {
-		return nil, err
-	}
-	return &Credential{tokenSrc: tokenSrc}, nil
-}
-
-// NewGoogleCredential creates a new Google credential which obtains credentials from Application Default Credentials.
-func NewGoogleCredential(audience string) (*Credential, error) {
-	return newGoogleCredential(audience, google.FindDefaultCredentials, idtoken.NewTokenSource)
-}
-
-// NewOAuthCredential creates a new OAuth credential which obtains credentials by making a client credentials request to an OAuth endpoint.
-// clientId, clientSecret - client credentials used to authenticate the client when obtaining credentials.
-// endpointURL - target URL of the OAuth endpoint to make the OAuth request to.
-func NewOAuthCredential(audience string, clientId string, clientSecret string, endpointURL *url.URL) *Credential {
-	tokenSrc := &oauthTokenSource{
-		clientId:     clientId,
-		clientSecret: clientSecret,
-		endpointURL:  endpointURL,
-		audience:     audience,
-	}
-	return &Credential{tokenSrc: tokenSrc}
-}
-
-// oauthTokenSource is a token source that obtains tokens via an OAuth client credentials request.
-type oauthTokenSource struct {
-	clientId     string
-	clientSecret string
-	endpointURL  *url.URL
-	audience     string
-	token        *oauth2.Token
-}
-
-// oauthClientCredientialsRequest defines an OAuth client credentials request.
-type oauthClientCredientialsRequest struct {
-	GrantType    string `json:"grant_type"`
-	ClientId     string `json:"client_id"`
-	ClientSecret string `json:"client_secret"`
-	Audience     string `json:"audience"`
-}
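For illustration, a minimal sketch of wiring up an OAuth credential with this constructor; the endpoint URL, client id, and secret are hypothetical placeholders:

```{go}
endpoint, err := url.Parse("https://auth.example.com/oauth/token")
if err != nil {
	panic(err)
}
// The token is fetched lazily: the first RPC triggers the client
// credentials request, and the token is reused until it expires.
cred := feast.NewOAuthCredential("feast.example.com", "my-client-id", "my-client-secret", endpoint)
```

The resulting *Credential is then passed to the client via SecurityConfig (see client.go below).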
-// Token obtains or refreshes a token from the OAuth token source.
-func (tokenSrc *oauthTokenSource) Token() (*oauth2.Token, error) {
-	if tokenSrc.token == nil || !tokenSrc.token.Valid() {
-		// Refresh the OAuth ID token by making an OAuth client credentials request.
-		req := &oauthClientCredientialsRequest{
-			GrantType:    "client_credentials",
-			ClientId:     tokenSrc.clientId,
-			ClientSecret: tokenSrc.clientSecret,
-			Audience:     tokenSrc.audience,
-		}
-
-		reqBytes, err := json.Marshal(req)
-		if err != nil {
-			return nil, err
-		}
-		resp, err := http.Post(tokenSrc.endpointURL.String(),
-			"application/json", bytes.NewBuffer(reqBytes))
-		if err != nil {
-			return nil, err
-		}
-		defer resp.Body.Close()
-		if resp.StatusCode != http.StatusOK {
-			return nil, fmt.Errorf("OAuth Endpoint returned unexpected status: %s", resp.Status)
-		}
-		respBytes, err := ioutil.ReadAll(resp.Body)
-		if err != nil {
-			return nil, err
-		}
-		tokenSrc.token = &oauth2.Token{}
-		err = json.Unmarshal(respBytes, tokenSrc.token)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	return tokenSrc.token, nil
-}
diff --git a/sdk/go/auth_test.go b/sdk/go/auth_test.go
deleted file mode 100644
index 5c1711994c..0000000000
--- a/sdk/go/auth_test.go
+++ /dev/null
@@ -1,142 +0,0 @@
-package feast
-
-import (
-	"context"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-	"net/http/httptest"
-	"net/url"
-	"testing"
-
-	"golang.org/x/oauth2"
-	"golang.org/x/oauth2/google"
-	"google.golang.org/api/idtoken"
-)
-
-// mockGoogleCredential returns a mocked Google credential.
-func mockGoogleCredential(token string, targetAudience string) (*Credential, error) {
-	// Mock the find-default-credentials implementation.
-	findDefaultCredentials := func(ctx context.Context, scopes ...string) (*google.Credentials, error) {
-		if len(scopes) != 2 || scopes[0] != "openid" || scopes[1] != "email" {
-			return nil, fmt.Errorf("Got bad scopes. Expected 'openid', 'email'")
-		}
-
-		return &google.Credentials{
-			ProjectID: "project_id",
-			JSON:      []byte("mock key json"),
-		}, nil
-	}
-
-	// Mock the ID token source implementation.
-	makeTokenSource := func(ctx context.Context, audience string, opts ...idtoken.ClientOption) (oauth2.TokenSource, error) {
-		// Unable to check opts, as ClientOption references an internal type.
-		if targetAudience != audience {
-			return nil, fmt.Errorf("Audience does not match up with target audience")
-		}
-
-		return oauth2.StaticTokenSource(&oauth2.Token{
-			AccessToken: token,
-		}), nil
-	}
-
-	return newGoogleCredential(targetAudience, findDefaultCredentials, makeTokenSource)
-}
-
-// mockOAuthCredential creates a mocked OAuth credential with a backing mocked OAuth server.
-func mockOAuthCredential(token string, audience string) (*httptest.Server, *Credential) {
-	clientId := "id"
-	clientSecret := "secret"
-	path := "/oauth"
-
-	// Create a mock OAuth server to test the OAuth provider against.
-	handlers := http.NewServeMux()
-	handlers.HandleFunc(path, func(resp http.ResponseWriter, req *http.Request) {
-		reqBytes, err := ioutil.ReadAll(req.Body)
-		if err != nil {
-			resp.WriteHeader(http.StatusBadRequest)
-		}
-
-		oauthReq := oauthClientCredientialsRequest{}
-		err = json.Unmarshal(reqBytes, &oauthReq)
-		if err != nil {
-			resp.WriteHeader(http.StatusBadRequest)
-		}
-
-		if oauthReq.GrantType != "client_credentials" ||
-			oauthReq.ClientId != clientId ||
-			oauthReq.ClientSecret != clientSecret ||
-			oauthReq.Audience != audience {
-			resp.WriteHeader(http.StatusUnauthorized)
-		}
-
-		_, err = resp.Write([]byte(fmt.Sprintf("{\"access_token\": \"%s\"}", token)))
-		if err != nil {
-			resp.WriteHeader(http.StatusInternalServerError)
-		}
-	})
-
-	srv := httptest.NewServer(handlers)
-	endpointURL, _ := url.Parse(srv.URL + path)
-	return srv, NewOAuthCredential(audience, clientId, clientSecret, endpointURL)
-}
-
-func TestCredentials(t *testing.T) {
-	audience := "localhost"
-	srv, oauthCred := mockOAuthCredential("oauth token", audience)
-	defer srv.Close()
-	googleCred, err := mockGoogleCredential("google token", audience)
-	if err != nil {
-		t.Errorf("Unexpected error creating mock google credential: %v", err)
-	}
-
-	tt := []struct {
-		name       string
-		credential *Credential
-		want       string
-		wantErr    bool
-		err        error
-	}{
-		{
-			name:       "Valid Static Credential get authentication metadata.",
-			credential: NewStaticCredential("static token"),
-			want:       "static token",
-			wantErr:    false,
-			err:        nil,
-		},
-		{
-			name:       "Valid Google Credential get authentication metadata.",
-			credential: googleCred,
-			want:       "google token",
-			wantErr:    false,
-			err:        nil,
-		},
-		{
-			name:       "Valid OAuth Credential get authentication metadata.",
-			credential: oauthCred,
-			want:       "oauth token",
-			wantErr:    false,
-			err:        nil,
-		},
-	}
-
-	for _, tc := range tt {
-		t.Run(tc.name, func(t *testing.T) {
-			ctx := context.Background()
-			meta, err := tc.credential.GetRequestMetadata(ctx, "feast.serving")
-			if err != nil {
-				t.Error(err)
-			}
-			authKey := "Authorization"
-			if _, ok := meta[authKey]; !ok {
-				t.Errorf("Expected authentication metadata with key: '%s'", authKey)
-			}
-
-			expectedVal := "Bearer " + tc.want
-			if meta[authKey] != expectedVal {
-				t.Errorf("Expected authentication metadata with value: '%s' Got instead: '%s'", expectedVal, meta[authKey])
-			}
-		})
-	}
-}
diff --git a/sdk/go/client.go b/sdk/go/client.go
deleted file mode 100644
index 4deb0a789c..0000000000
--- a/sdk/go/client.go
+++ /dev/null
@@ -1,134 +0,0 @@
-package feast
-
-import (
-	"context"
-	"crypto/x509"
-	"fmt"
-
-	"github.com/feast-dev/feast/sdk/go/protos/feast/serving"
-	"github.com/opentracing-contrib/go-grpc"
-	"github.com/opentracing/opentracing-go"
-	"go.opencensus.io/plugin/ocgrpc"
-	"google.golang.org/grpc"
-	"google.golang.org/grpc/credentials"
-)
-
-// Client is a feast serving client.
-type Client interface {
-	GetOnlineFeatures(ctx context.Context, req *OnlineFeaturesRequest) (*OnlineFeaturesResponse, error)
-	GetFeastServingInfo(ctx context.Context, in *serving.GetFeastServingInfoRequest) (*serving.GetFeastServingInfoResponse, error)
-	Close() error
-}
-
-// GrpcClient is a grpc client for feast serving.
-type GrpcClient struct {
-	cli  serving.ServingServiceClient
-	conn *grpc.ClientConn
-}
-
-// SecurityConfig wraps security config for GrpcClient.
-type SecurityConfig struct {
-	// Whether to enable TLS/SSL transport security if true.
-	EnableTLS bool
-	// Optional: Provides the path to the TLS certificate used to verify service identity.
-	TLSCertPath string
-	// Optional: Credential used for authentication.
-	// Disables authentication if unspecified.
-	Credential *Credential
-}
-
-// NewGrpcClient constructs a client that can interact via grpc with the feast serving instance at the given host:port.
-func NewGrpcClient(host string, port int) (*GrpcClient, error) {
-	return NewSecureGrpcClient(host, port, SecurityConfig{
-		EnableTLS:  false,
-		Credential: nil,
-	})
-}
-
-// NewSecureGrpcClient constructs a secure client that uses security features (i.e. authentication).
-// host - hostname of the serving host/instance to connect to.
-// port - port of the serving host/instance to connect to.
-// securityConfig - security config that configures client security.
-func NewSecureGrpcClient(host string, port int, security SecurityConfig) (*GrpcClient, error) {
-	return NewSecureGrpcClientWithDialOptions(host, port, security)
-}
-
-// NewSecureGrpcClientWithDialOptions constructs a secure client that uses security features (i.e. authentication) along with custom grpc dial options.
-// host - hostname of the serving host/instance to connect to.
-// port - port of the serving host/instance to connect to.
-// securityConfig - security config that configures client security.
-// opts - grpc.DialOptions which should be used with this connection.
-func NewSecureGrpcClientWithDialOptions(host string, port int, security SecurityConfig, opts ...grpc.DialOption) (*GrpcClient, error) {
-	feastCli := &GrpcClient{}
-	adr := fmt.Sprintf("%s:%d", host, port)
-
-	// Compile grpc dial options from the security config.
-	options := append(opts, grpc.WithStatsHandler(&ocgrpc.ClientHandler{}))
-	// Configure client TLS.
-	if !security.EnableTLS {
-		options = append(options, grpc.WithInsecure())
-	} else if security.EnableTLS && security.TLSCertPath != "" {
-		// Read the TLS certificate from the given path.
-		tlsCreds, err := credentials.NewClientTLSFromFile(security.TLSCertPath, "")
-		if err != nil {
-			return nil, err
-		}
-		options = append(options, grpc.WithTransportCredentials(tlsCreds))
-	} else {
-		// Use the system TLS certificate pool.
-		certPool, err := x509.SystemCertPool()
-		if err != nil {
-			return nil, err
-		}
-		tlsCreds := credentials.NewClientTLSFromCert(certPool, "")
-		options = append(options, grpc.WithTransportCredentials(tlsCreds))
-	}
-
-	// Enable authentication by attaching credentials if given.
-	if security.Credential != nil {
-		options = append(options, grpc.WithPerRPCCredentials(security.Credential))
-	}
-
-	// Enable tracing if a global tracer is registered.
-	tracingInterceptor := grpc.WithUnaryInterceptor(
-		otgrpc.OpenTracingClientInterceptor(opentracing.GlobalTracer()))
-	options = append(options, tracingInterceptor)
-
-	conn, err := grpc.Dial(adr, options...)
-	if err != nil {
-		return nil, err
-	}
-	feastCli.cli = serving.NewServingServiceClient(conn)
-	feastCli.conn = conn
-	return feastCli, nil
-}
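For illustration, a minimal sketch that combines SecurityConfig with a credential from auth.go; the host and token are hypothetical placeholders:

```{go}
cli, err := feast.NewSecureGrpcClient("feast.example.com", 443, feast.SecurityConfig{
	EnableTLS:  true,                                  // TLSCertPath unset: verify against the system cert pool
	Credential: feast.NewStaticCredential("my-token"), // adds "Authorization: Bearer my-token" to each RPC
})
if err != nil {
	panic(err)
}
defer cli.Close()
```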
-// GetOnlineFeatures gets the latest values of the requested features from the Feast serving instance provided.
-func (fc *GrpcClient) GetOnlineFeatures(ctx context.Context, req *OnlineFeaturesRequest) (
-	*OnlineFeaturesResponse, error) {
-	featuresRequest, err := req.buildRequest()
-	if err != nil {
-		return nil, err
-	}
-	resp, err := fc.cli.GetOnlineFeaturesV2(ctx, featuresRequest)
-
-	// Collect unique entity refs from the entity rows.
-	entityRefs := make(map[string]struct{})
-	for _, entityRows := range req.Entities {
-		for ref := range entityRows {
-			entityRefs[ref] = struct{}{}
-		}
-	}
-	return &OnlineFeaturesResponse{RawResponse: resp}, err
-}
-
-// GetFeastServingInfo gets information about the feast serving instance this client is connected to.
-func (fc *GrpcClient) GetFeastServingInfo(ctx context.Context, in *serving.GetFeastServingInfoRequest) (
-	*serving.GetFeastServingInfoResponse, error) {
-	return fc.cli.GetFeastServingInfo(ctx, in)
-}
-
-// Close closes the grpc connection.
-func (fc *GrpcClient) Close() error {
-	return fc.conn.Close()
-}
diff --git a/sdk/go/client_test.go b/sdk/go/client_test.go
deleted file mode 100644
index a94a577e84..0000000000
--- a/sdk/go/client_test.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package feast
-
-import (
-	"context"
-	"testing"
-
-	"github.com/feast-dev/feast/sdk/go/mocks"
-	"github.com/feast-dev/feast/sdk/go/protos/feast/serving"
-	"github.com/feast-dev/feast/sdk/go/protos/feast/types"
-	"github.com/golang/mock/gomock"
-	"github.com/google/go-cmp/cmp"
-)
-
-func TestGetOnlineFeatures(t *testing.T) {
-	tt := []struct {
-		name    string
-		req     OnlineFeaturesRequest
-		receive OnlineFeaturesResponse
-		want    OnlineFeaturesResponse
-		wantErr bool
-		err     error
-	}{
-		{
-			name: "Valid client Get Online Features call",
-			req: OnlineFeaturesRequest{
-				Features: []string{
-					"driver:rating",
-					"driver:null_value",
-				},
-				Entities: []Row{
-					{"driver_id": Int64Val(1)},
-				},
-				Project: "driver_project",
-			},
-			want: OnlineFeaturesResponse{
-				RawResponse: &serving.GetOnlineFeaturesResponse{
-					FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{
-						{
-							Fields: map[string]*types.Value{
-								"driver:rating":     Int64Val(1),
-								"driver:null_value": {},
-							},
-							Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{
-								"driver:rating":     serving.GetOnlineFeaturesResponse_PRESENT,
-								"driver:null_value": serving.GetOnlineFeaturesResponse_NULL_VALUE,
-							},
-						},
-					},
-				},
-			},
-		},
-	}
-
-	for _, tc := range tt {
-		t.Run(tc.name, func(t *testing.T) {
-			// Mock the feast grpc client's get online feature requests.
-			ctrl := gomock.NewController(t)
-			defer ctrl.Finish()
-			cli := mock_serving.NewMockServingServiceClient(ctrl)
-			ctx := context.Background()
-			rawRequest, _ := tc.req.buildRequest()
-			resp := tc.want.RawResponse
-			cli.EXPECT().GetOnlineFeaturesV2(ctx, rawRequest).Return(resp, nil).Times(1)
-
-			client := &GrpcClient{
-				cli: cli,
-			}
-			got, err := client.GetOnlineFeatures(ctx, &tc.req)
-
-			if err != nil && !tc.wantErr {
-				t.Errorf("error = %v, wantErr %v", err, tc.wantErr)
-				return
-			}
-			if tc.wantErr && err.Error() != tc.err.Error() {
-				t.Errorf("error = %v, expected err = %v", err, tc.err)
-				return
-			}
-			// TODO: compare directly once OnlineFeaturesResponse no longer embeds a rawResponse.
- if !cmp.Equal(got.RawResponse.String(), tc.want.RawResponse.String()) { - t.Errorf("got: \n%v\nwant:\n%v", got.RawResponse.String(), tc.want.RawResponse.String()) - } - }) - } -} diff --git a/sdk/go/go.mod b/sdk/go/go.mod deleted file mode 100644 index d3b454d55c..0000000000 --- a/sdk/go/go.mod +++ /dev/null @@ -1,16 +0,0 @@ -module github.com/feast-dev/feast/sdk/go - -go 1.13 - -require ( - github.com/golang/mock v1.4.3 - github.com/golang/protobuf v1.4.2 - github.com/google/go-cmp v0.5.1 - github.com/opentracing-contrib/go-grpc v0.0.0-20200813121455-4a6760c71486 - github.com/opentracing/opentracing-go v1.1.0 - go.opencensus.io v0.22.4 - golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d - google.golang.org/api v0.30.0 - google.golang.org/grpc v1.31.0 - google.golang.org/protobuf v1.25.0 -) diff --git a/sdk/go/go.sum b/sdk/go/go.sum deleted file mode 100644 index f9a22785a5..0000000000 --- a/sdk/go/go.sum +++ /dev/null @@ -1,404 +0,0 @@ -cloud.google.com/go v0.26.0 h1:e0WKqKTd5BnrG8aKH3J3h+QvEIQtSUcf2n5UZ5ZgLtQ= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0 h1:RmDygqvj27Zf3fCQjQRtLyC7KwFcHkeJitcO0OoGOcA= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod 
h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1 h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod 
h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3 h1:GV+pQPG/EUUbkh47niozDcADz6go/dUwhVzdUQHIVRw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0 h1:aRz0NBceriICVtjhCgKkDvl+RudKu1CT6h0ZvUTrNfE= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1 h1:JFrFEBb2xKufg6XkJsJr+WbKb4FQlURi5RUcBveYu9k= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod 
h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/opentracing-contrib/go-grpc v0.0.0-20200813121455-4a6760c71486 h1:K35HCWaOTJIPW6cDHK4yj3QfRY/NhE0pBbfoc0M2NMQ= -github.com/opentracing-contrib/go-grpc v0.0.0-20200813121455-4a6760c71486/go.mod h1:DYR5Eij8rJl8h7gblRrOZ8g0kW1umSpKqYIBTgeDtLo= -github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod 
h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4 h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod 
v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190921015927-1a5e07d1ff72/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU= 
-golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd h1:r7DufRZuZbWB7j439YfAzP8RPDa9unLkpwQKUYbIMPI= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642 h1:B6caxRw+hozq68X2MY7jEpZh/cr4/aHLv9xU8Kkadrw= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135 h1:5Beo0mZN8dRzgrMMkDp0jc8YXQKx9DiJ2k1dkvGsn5A= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= 
-google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0 h1:yfrXXP61wVuLb0vBcG6qaOoIoqYEzOQS8jum51jkv2w= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb h1:i1Ppqkc3WQXikh8bXiwHqAN5Rv3/qDCcRk0/Otx73BY= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c h1:Lq4llNryJoaVFRmvrIwC/ZHH7tNt4tUYIu8+se2aayY= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0 h1:bO/TA4OxCOummhSf10siHuG7vJOiwh7SpRpFZDkOgl4= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0 h1:T7P4R73V3SSDPhH7WW7ATbfViLtmamH0DKrP3f9AuDI= -google.golang.org/grpc v1.31.0/go.mod 
h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0 h1:qdOKuR/EIArgaWNjetjgTzgVTAZ+S/WXVrq9HW9zimw= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/sdk/go/mocks/serving_mock.go b/sdk/go/mocks/serving_mock.go deleted file mode 100644 index 00d2e768ef..0000000000 --- a/sdk/go/mocks/serving_mock.go +++ /dev/null @@ -1,77 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/feast-dev/feast/sdk/go/protos/feast/serving (interfaces: ServingServiceClient) - -// Package mock_serving is a generated GoMock package. 
-package mock_serving - -import ( - context "context" - reflect "reflect" - - serving "github.com/feast-dev/feast/sdk/go/protos/feast/serving" - gomock "github.com/golang/mock/gomock" - grpc "google.golang.org/grpc" -) - -// MockServingServiceClient is a mock of ServingServiceClient interface -type MockServingServiceClient struct { - ctrl *gomock.Controller - recorder *MockServingServiceClientMockRecorder -} - -// MockServingServiceClientMockRecorder is the mock recorder for MockServingServiceClient -type MockServingServiceClientMockRecorder struct { - mock *MockServingServiceClient -} - -// NewMockServingServiceClient creates a new mock instance -func NewMockServingServiceClient(ctrl *gomock.Controller) *MockServingServiceClient { - mock := &MockServingServiceClient{ctrl: ctrl} - mock.recorder = &MockServingServiceClientMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockServingServiceClient) EXPECT() *MockServingServiceClientMockRecorder { - return m.recorder -} - -// GetFeastServingInfo mocks base method -func (m *MockServingServiceClient) GetFeastServingInfo(arg0 context.Context, arg1 *serving.GetFeastServingInfoRequest, arg2 ...grpc.CallOption) (*serving.GetFeastServingInfoResponse, error) { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetFeastServingInfo", varargs...) - ret0, _ := ret[0].(*serving.GetFeastServingInfoResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetFeastServingInfo indicates an expected call of GetFeastServingInfo -func (mr *MockServingServiceClientMockRecorder) GetFeastServingInfo(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFeastServingInfo", reflect.TypeOf((*MockServingServiceClient)(nil).GetFeastServingInfo), varargs...) -} - -// GetOnlineFeaturesV2 mocks base method -func (m *MockServingServiceClient) GetOnlineFeaturesV2(arg0 context.Context, arg1 *serving.GetOnlineFeaturesRequestV2, arg2 ...grpc.CallOption) (*serving.GetOnlineFeaturesResponse, error) { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1} - for _, a := range arg2 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetOnlineFeaturesV2", varargs...) - ret0, _ := ret[0].(*serving.GetOnlineFeaturesResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOnlineFeaturesV2 indicates an expected call of GetOnlineFeaturesV2 -func (mr *MockServingServiceClientMockRecorder) GetOnlineFeaturesV2(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOnlineFeaturesV2", reflect.TypeOf((*MockServingServiceClient)(nil).GetOnlineFeaturesV2), varargs...) -} diff --git a/sdk/go/protos/feast/core/CoreService.pb.go b/sdk/go/protos/feast/core/CoreService.pb.go deleted file mode 100644 index 2d9b0a8e0a..0000000000 --- a/sdk/go/protos/feast/core/CoreService.pb.go +++ /dev/null @@ -1,3177 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/CoreService.proto - -package core - -import ( - context "context" - _ "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" - proto "github.com/golang/protobuf/proto" - _ "github.com/golang/protobuf/ptypes/timestamp" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type UpdateStoreResponse_Status int32 - -const ( - // Existing store config matching the given store id is identical to the given store config. - UpdateStoreResponse_NO_CHANGE UpdateStoreResponse_Status = 0 - // New store created or existing config updated. - UpdateStoreResponse_UPDATED UpdateStoreResponse_Status = 1 -) - -// Enum value maps for UpdateStoreResponse_Status. -var ( - UpdateStoreResponse_Status_name = map[int32]string{ - 0: "NO_CHANGE", - 1: "UPDATED", - } - UpdateStoreResponse_Status_value = map[string]int32{ - "NO_CHANGE": 0, - "UPDATED": 1, - } -) - -func (x UpdateStoreResponse_Status) Enum() *UpdateStoreResponse_Status { - p := new(UpdateStoreResponse_Status) - *p = x - return p -} - -func (x UpdateStoreResponse_Status) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (UpdateStoreResponse_Status) Descriptor() protoreflect.EnumDescriptor { - return file_feast_core_CoreService_proto_enumTypes[0].Descriptor() -} - -func (UpdateStoreResponse_Status) Type() protoreflect.EnumType { - return &file_feast_core_CoreService_proto_enumTypes[0] -} - -func (x UpdateStoreResponse_Status) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use UpdateStoreResponse_Status.Descriptor instead. -func (UpdateStoreResponse_Status) EnumDescriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{13, 0} -} - -// Request for a single entity -type GetEntityRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of entity (required). - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Name of project the entity belongs to. If omitted will default to 'default' project. 
- Project string `protobuf:"bytes,2,opt,name=project,proto3" json:"project,omitempty"` -} - -func (x *GetEntityRequest) Reset() { - *x = GetEntityRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetEntityRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetEntityRequest) ProtoMessage() {} - -func (x *GetEntityRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetEntityRequest.ProtoReflect.Descriptor instead. -func (*GetEntityRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{0} -} - -func (x *GetEntityRequest) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *GetEntityRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -// Response containing a single entity -type GetEntityResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` -} - -func (x *GetEntityResponse) Reset() { - *x = GetEntityResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetEntityResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetEntityResponse) ProtoMessage() {} - -func (x *GetEntityResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetEntityResponse.ProtoReflect.Descriptor instead. 
-func (*GetEntityResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{1} -} - -func (x *GetEntityResponse) GetEntity() *Entity { - if x != nil { - return x.Entity - } - return nil -} - -// Retrieves details for all versions of a specific entity -type ListEntitiesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Filter *ListEntitiesRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` -} - -func (x *ListEntitiesRequest) Reset() { - *x = ListEntitiesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListEntitiesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListEntitiesRequest) ProtoMessage() {} - -func (x *ListEntitiesRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListEntitiesRequest.ProtoReflect.Descriptor instead. -func (*ListEntitiesRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{2} -} - -func (x *ListEntitiesRequest) GetFilter() *ListEntitiesRequest_Filter { - if x != nil { - return x.Filter - } - return nil -} - -type ListEntitiesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` -} - -func (x *ListEntitiesResponse) Reset() { - *x = ListEntitiesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListEntitiesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListEntitiesResponse) ProtoMessage() {} - -func (x *ListEntitiesResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListEntitiesResponse.ProtoReflect.Descriptor instead. 
-func (*ListEntitiesResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{3} -} - -func (x *ListEntitiesResponse) GetEntities() []*Entity { - if x != nil { - return x.Entities - } - return nil -} - -type ListFeaturesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Filter *ListFeaturesRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` -} - -func (x *ListFeaturesRequest) Reset() { - *x = ListFeaturesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeaturesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeaturesRequest) ProtoMessage() {} - -func (x *ListFeaturesRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeaturesRequest.ProtoReflect.Descriptor instead. -func (*ListFeaturesRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{4} -} - -func (x *ListFeaturesRequest) GetFilter() *ListFeaturesRequest_Filter { - if x != nil { - return x.Filter - } - return nil -} - -type ListFeaturesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Features map[string]*FeatureSpecV2 `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *ListFeaturesResponse) Reset() { - *x = ListFeaturesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeaturesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeaturesResponse) ProtoMessage() {} - -func (x *ListFeaturesResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeaturesResponse.ProtoReflect.Descriptor instead. 
-func (*ListFeaturesResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{5} -} - -func (x *ListFeaturesResponse) GetFeatures() map[string]*FeatureSpecV2 { - if x != nil { - return x.Features - } - return nil -} - -type ListStoresRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Filter *ListStoresRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` -} - -func (x *ListStoresRequest) Reset() { - *x = ListStoresRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListStoresRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListStoresRequest) ProtoMessage() {} - -func (x *ListStoresRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListStoresRequest.ProtoReflect.Descriptor instead. -func (*ListStoresRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{6} -} - -func (x *ListStoresRequest) GetFilter() *ListStoresRequest_Filter { - if x != nil { - return x.Filter - } - return nil -} - -type ListStoresResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Store []*Store `protobuf:"bytes,1,rep,name=store,proto3" json:"store,omitempty"` -} - -func (x *ListStoresResponse) Reset() { - *x = ListStoresResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListStoresResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListStoresResponse) ProtoMessage() {} - -func (x *ListStoresResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListStoresResponse.ProtoReflect.Descriptor instead. -func (*ListStoresResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{7} -} - -func (x *ListStoresResponse) GetStore() []*Store { - if x != nil { - return x.Store - } - return nil -} - -type ApplyEntityRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // If project is unspecified, will default to 'default' project. - // If project specified does not exist, the project would be automatically created. - Spec *EntitySpecV2 `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - // Name of project that this entity belongs to. 
- Project string `protobuf:"bytes,2,opt,name=project,proto3" json:"project,omitempty"` -} - -func (x *ApplyEntityRequest) Reset() { - *x = ApplyEntityRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ApplyEntityRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ApplyEntityRequest) ProtoMessage() {} - -func (x *ApplyEntityRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ApplyEntityRequest.ProtoReflect.Descriptor instead. -func (*ApplyEntityRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{8} -} - -func (x *ApplyEntityRequest) GetSpec() *EntitySpecV2 { - if x != nil { - return x.Spec - } - return nil -} - -func (x *ApplyEntityRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -type ApplyEntityResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` -} - -func (x *ApplyEntityResponse) Reset() { - *x = ApplyEntityResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ApplyEntityResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ApplyEntityResponse) ProtoMessage() {} - -func (x *ApplyEntityResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ApplyEntityResponse.ProtoReflect.Descriptor instead. -func (*ApplyEntityResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{9} -} - -func (x *ApplyEntityResponse) GetEntity() *Entity { - if x != nil { - return x.Entity - } - return nil -} - -type GetFeastCoreVersionRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetFeastCoreVersionRequest) Reset() { - *x = GetFeastCoreVersionRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetFeastCoreVersionRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetFeastCoreVersionRequest) ProtoMessage() {} - -func (x *GetFeastCoreVersionRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetFeastCoreVersionRequest.ProtoReflect.Descriptor instead. 
-func (*GetFeastCoreVersionRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{10} -} - -type GetFeastCoreVersionResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` -} - -func (x *GetFeastCoreVersionResponse) Reset() { - *x = GetFeastCoreVersionResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetFeastCoreVersionResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetFeastCoreVersionResponse) ProtoMessage() {} - -func (x *GetFeastCoreVersionResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetFeastCoreVersionResponse.ProtoReflect.Descriptor instead. -func (*GetFeastCoreVersionResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{11} -} - -func (x *GetFeastCoreVersionResponse) GetVersion() string { - if x != nil { - return x.Version - } - return "" -} - -type UpdateStoreRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Store *Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store,omitempty"` -} - -func (x *UpdateStoreRequest) Reset() { - *x = UpdateStoreRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *UpdateStoreRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*UpdateStoreRequest) ProtoMessage() {} - -func (x *UpdateStoreRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use UpdateStoreRequest.ProtoReflect.Descriptor instead. 
-func (*UpdateStoreRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{12} -} - -func (x *UpdateStoreRequest) GetStore() *Store { - if x != nil { - return x.Store - } - return nil -} - -type UpdateStoreResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Store *Store `protobuf:"bytes,1,opt,name=store,proto3" json:"store,omitempty"` - Status UpdateStoreResponse_Status `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.UpdateStoreResponse_Status" json:"status,omitempty"` -} - -func (x *UpdateStoreResponse) Reset() { - *x = UpdateStoreResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *UpdateStoreResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*UpdateStoreResponse) ProtoMessage() {} - -func (x *UpdateStoreResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use UpdateStoreResponse.ProtoReflect.Descriptor instead. -func (*UpdateStoreResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{13} -} - -func (x *UpdateStoreResponse) GetStore() *Store { - if x != nil { - return x.Store - } - return nil -} - -func (x *UpdateStoreResponse) GetStatus() UpdateStoreResponse_Status { - if x != nil { - return x.Status - } - return UpdateStoreResponse_NO_CHANGE -} - -// Request to create a project -type CreateProjectRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of project (required) - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *CreateProjectRequest) Reset() { - *x = CreateProjectRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CreateProjectRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CreateProjectRequest) ProtoMessage() {} - -func (x *CreateProjectRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CreateProjectRequest.ProtoReflect.Descriptor instead. 
-func (*CreateProjectRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{14} -} - -func (x *CreateProjectRequest) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -// Response for creation of a project -type CreateProjectResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *CreateProjectResponse) Reset() { - *x = CreateProjectResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CreateProjectResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CreateProjectResponse) ProtoMessage() {} - -func (x *CreateProjectResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CreateProjectResponse.ProtoReflect.Descriptor instead. -func (*CreateProjectResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{15} -} - -// Request for the archival of a project -type ArchiveProjectRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of project to be archived - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *ArchiveProjectRequest) Reset() { - *x = ArchiveProjectRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ArchiveProjectRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ArchiveProjectRequest) ProtoMessage() {} - -func (x *ArchiveProjectRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ArchiveProjectRequest.ProtoReflect.Descriptor instead. 
-func (*ArchiveProjectRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{16} -} - -func (x *ArchiveProjectRequest) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -// Response for archival of a project -type ArchiveProjectResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *ArchiveProjectResponse) Reset() { - *x = ArchiveProjectResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ArchiveProjectResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ArchiveProjectResponse) ProtoMessage() {} - -func (x *ArchiveProjectResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ArchiveProjectResponse.ProtoReflect.Descriptor instead. -func (*ArchiveProjectResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{17} -} - -// Request for listing of projects -type ListProjectsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *ListProjectsRequest) Reset() { - *x = ListProjectsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListProjectsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListProjectsRequest) ProtoMessage() {} - -func (x *ListProjectsRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListProjectsRequest.ProtoReflect.Descriptor instead. 
-func (*ListProjectsRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{18} -} - -// Response for listing of projects -type ListProjectsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // List of project names (archived projects are filtered out) - Projects []string `protobuf:"bytes,1,rep,name=projects,proto3" json:"projects,omitempty"` -} - -func (x *ListProjectsResponse) Reset() { - *x = ListProjectsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListProjectsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListProjectsResponse) ProtoMessage() {} - -func (x *ListProjectsResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListProjectsResponse.ProtoReflect.Descriptor instead. -func (*ListProjectsResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{19} -} - -func (x *ListProjectsResponse) GetProjects() []string { - if x != nil { - return x.Projects - } - return nil -} - -type UpdateFeatureSetStatusResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *UpdateFeatureSetStatusResponse) Reset() { - *x = UpdateFeatureSetStatusResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *UpdateFeatureSetStatusResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*UpdateFeatureSetStatusResponse) ProtoMessage() {} - -func (x *UpdateFeatureSetStatusResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use UpdateFeatureSetStatusResponse.ProtoReflect.Descriptor instead. -func (*UpdateFeatureSetStatusResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{20} -} - -type ApplyFeatureTableRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional. Name of the Project to apply the Feature Table to. - // If unspecified, will apply FeatureTable to the default project. 
- Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // Feature Table specification to apply - TableSpec *FeatureTableSpec `protobuf:"bytes,2,opt,name=table_spec,json=tableSpec,proto3" json:"table_spec,omitempty"` -} - -func (x *ApplyFeatureTableRequest) Reset() { - *x = ApplyFeatureTableRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ApplyFeatureTableRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ApplyFeatureTableRequest) ProtoMessage() {} - -func (x *ApplyFeatureTableRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ApplyFeatureTableRequest.ProtoReflect.Descriptor instead. -func (*ApplyFeatureTableRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{21} -} - -func (x *ApplyFeatureTableRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *ApplyFeatureTableRequest) GetTableSpec() *FeatureTableSpec { - if x != nil { - return x.TableSpec - } - return nil -} - -type ApplyFeatureTableResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Table *FeatureTable `protobuf:"bytes,1,opt,name=table,proto3" json:"table,omitempty"` -} - -func (x *ApplyFeatureTableResponse) Reset() { - *x = ApplyFeatureTableResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ApplyFeatureTableResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ApplyFeatureTableResponse) ProtoMessage() {} - -func (x *ApplyFeatureTableResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ApplyFeatureTableResponse.ProtoReflect.Descriptor instead. -func (*ApplyFeatureTableResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{22} -} - -func (x *ApplyFeatureTableResponse) GetTable() *FeatureTable { - if x != nil { - return x.Table - } - return nil -} - -type GetFeatureTableRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional. Name of the Project to retrieve the Feature Table from. - // If unspecified, will apply FeatureTable to the default project. - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // Name of the FeatureTable to retrieve. 
- Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *GetFeatureTableRequest) Reset() { - *x = GetFeatureTableRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetFeatureTableRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetFeatureTableRequest) ProtoMessage() {} - -func (x *GetFeatureTableRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetFeatureTableRequest.ProtoReflect.Descriptor instead. -func (*GetFeatureTableRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{23} -} - -func (x *GetFeatureTableRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *GetFeatureTableRequest) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -type GetFeatureTableResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The Feature Table retrieved. - Table *FeatureTable `protobuf:"bytes,1,opt,name=table,proto3" json:"table,omitempty"` -} - -func (x *GetFeatureTableResponse) Reset() { - *x = GetFeatureTableResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[24] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetFeatureTableResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetFeatureTableResponse) ProtoMessage() {} - -func (x *GetFeatureTableResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[24] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetFeatureTableResponse.ProtoReflect.Descriptor instead. 
-func (*GetFeatureTableResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{24} -} - -func (x *GetFeatureTableResponse) GetTable() *FeatureTable { - if x != nil { - return x.Table - } - return nil -} - -type ListFeatureTablesRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Filter used when listing Feature Tables - Filter *ListFeatureTablesRequest_Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` -} - -func (x *ListFeatureTablesRequest) Reset() { - *x = ListFeatureTablesRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[25] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeatureTablesRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeatureTablesRequest) ProtoMessage() {} - -func (x *ListFeatureTablesRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[25] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeatureTablesRequest.ProtoReflect.Descriptor instead. -func (*ListFeatureTablesRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{25} -} - -func (x *ListFeatureTablesRequest) GetFilter() *ListFeatureTablesRequest_Filter { - if x != nil { - return x.Filter - } - return nil -} - -type ListFeatureTablesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // List of matching Feature Tables - Tables []*FeatureTable `protobuf:"bytes,1,rep,name=tables,proto3" json:"tables,omitempty"` -} - -func (x *ListFeatureTablesResponse) Reset() { - *x = ListFeatureTablesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[26] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeatureTablesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeatureTablesResponse) ProtoMessage() {} - -func (x *ListFeatureTablesResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[26] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeatureTablesResponse.ProtoReflect.Descriptor instead. -func (*ListFeatureTablesResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{26} -} - -func (x *ListFeatureTablesResponse) GetTables() []*FeatureTable { - if x != nil { - return x.Tables - } - return nil -} - -type DeleteFeatureTableRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional. Name of the Project to delete the Feature Table from. - // If unspecified, will delete FeatureTable from the default project. - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // Name of the FeatureTable to delete. 
- Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *DeleteFeatureTableRequest) Reset() { - *x = DeleteFeatureTableRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[27] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DeleteFeatureTableRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DeleteFeatureTableRequest) ProtoMessage() {} - -func (x *DeleteFeatureTableRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[27] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DeleteFeatureTableRequest.ProtoReflect.Descriptor instead. -func (*DeleteFeatureTableRequest) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{27} -} - -func (x *DeleteFeatureTableRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *DeleteFeatureTableRequest) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -type DeleteFeatureTableResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *DeleteFeatureTableResponse) Reset() { - *x = DeleteFeatureTableResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[28] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DeleteFeatureTableResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DeleteFeatureTableResponse) ProtoMessage() {} - -func (x *DeleteFeatureTableResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[28] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DeleteFeatureTableResponse.ProtoReflect.Descriptor instead. -func (*DeleteFeatureTableResponse) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{28} -} - -type ListEntitiesRequest_Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional. Specifies the name of the project to list Entities in. - // It is NOT possible to provide an asterisk with a string in order to do pattern matching. - // If unspecified, this field will default to the default project 'default'. - Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` - // Optional. User defined metadata for entity. - // Entities with all matching labels will be returned. 
- Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *ListEntitiesRequest_Filter) Reset() { - *x = ListEntitiesRequest_Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[29] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListEntitiesRequest_Filter) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListEntitiesRequest_Filter) ProtoMessage() {} - -func (x *ListEntitiesRequest_Filter) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[29] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListEntitiesRequest_Filter.ProtoReflect.Descriptor instead. -func (*ListEntitiesRequest_Filter) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{2, 0} -} - -func (x *ListEntitiesRequest_Filter) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *ListEntitiesRequest_Filter) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -type ListFeaturesRequest_Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // User defined metadata for feature. - // Features with all matching labels will be returned. - Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // List of entities contained within the featureSet that the feature belongs to. - // Only feature tables with these entities will be searched for features. - Entities []string `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty"` - // Name of project that the feature tables belongs to. Filtering on projects is disabled. - // It is NOT possible to provide an asterisk with a string in order to do pattern matching. - // If unspecified this field will default to the default project 'default'. - Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` -} - -func (x *ListFeaturesRequest_Filter) Reset() { - *x = ListFeaturesRequest_Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[31] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeaturesRequest_Filter) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeaturesRequest_Filter) ProtoMessage() {} - -func (x *ListFeaturesRequest_Filter) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[31] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeaturesRequest_Filter.ProtoReflect.Descriptor instead. 
-func (*ListFeaturesRequest_Filter) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{4, 0} -} - -func (x *ListFeaturesRequest_Filter) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *ListFeaturesRequest_Filter) GetEntities() []string { - if x != nil { - return x.Entities - } - return nil -} - -func (x *ListFeaturesRequest_Filter) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -type ListStoresRequest_Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of desired store. Regex is not supported in this query. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *ListStoresRequest_Filter) Reset() { - *x = ListStoresRequest_Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[34] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListStoresRequest_Filter) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListStoresRequest_Filter) ProtoMessage() {} - -func (x *ListStoresRequest_Filter) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[34] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListStoresRequest_Filter.ProtoReflect.Descriptor instead. -func (*ListStoresRequest_Filter) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{6, 0} -} - -func (x *ListStoresRequest_Filter) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -type ListFeatureTablesRequest_Filter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional. Specifies the name of the project to list Feature Tables in. - // If unspecified would list Feature Tables in the default project. - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // Optional. Feature Tables with all matching labels will be returned. - // If unspecified would list Feature Tables without filtering by labels. - Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *ListFeatureTablesRequest_Filter) Reset() { - *x = ListFeatureTablesRequest_Filter{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_CoreService_proto_msgTypes[35] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListFeatureTablesRequest_Filter) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListFeatureTablesRequest_Filter) ProtoMessage() {} - -func (x *ListFeatureTablesRequest_Filter) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_CoreService_proto_msgTypes[35] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListFeatureTablesRequest_Filter.ProtoReflect.Descriptor instead. 
-func (*ListFeatureTablesRequest_Filter) Descriptor() ([]byte, []int) { - return file_feast_core_CoreService_proto_rawDescGZIP(), []int{25, 0} -} - -func (x *ListFeatureTablesRequest_Filter) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *ListFeatureTablesRequest_Filter) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -var File_feast_core_CoreService_proto protoreflect.FileDescriptor - -var file_feast_core_CoreService_proto_rawDesc = []byte{ - 0x0a, 0x1c, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x43, 0x6f, 0x72, - 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2d, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x69, 0x73, - 0x74, 0x69, 0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x18, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x16, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x40, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, - 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x22, 0x3f, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x45, 0x6e, 0x74, - 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x65, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, - 0x06, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x22, 0x81, 0x02, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, - 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x3e, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, - 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, - 0xa9, 0x01, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x12, 0x4a, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 
0x18, 0x04, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x46, 0x0a, 0x14, 0x4c, - 0x69, 0x73, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, - 0x69, 0x65, 0x73, 0x22, 0x9d, 0x02, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x06, 0x66, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, 0xc5, 0x01, 0x0a, 0x06, - 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x4a, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x4c, - 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x18, - 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0xc0, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4a, 0x0a, 0x08, - 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, - 0x66, 0x65, 
0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x1a, 0x56, 0x0a, 0x0d, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2f, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x70, 0x65, 0x63, 0x56, 0x32, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x4a, 0x04, 0x08, 0x01, 0x10, 0x02, 0x22, 0x6f, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, - 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3c, 0x0a, 0x06, 0x66, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, - 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, - 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x1c, 0x0a, 0x06, 0x46, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3d, 0x0a, 0x12, 0x4c, 0x69, 0x73, 0x74, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, - 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, - 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x5c, 0x0a, 0x12, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x04, - 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, - 0x65, 0x63, 0x56, 0x32, 0x52, 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x22, 0x41, 0x0a, 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x45, 0x6e, 0x74, - 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x65, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, - 0x06, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x22, 0x1c, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x46, 0x65, - 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x37, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, - 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 
0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0xa4, 0x01, - 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x3e, - 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x24, - 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x5f, 0x43, - 0x48, 0x41, 0x4e, 0x47, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x50, 0x44, 0x41, 0x54, - 0x45, 0x44, 0x10, 0x01, 0x22, 0x2a, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x22, 0x17, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2b, 0x0a, 0x15, 0x41, 0x72, 0x63, - 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x18, 0x0a, 0x16, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, - 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x15, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x32, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x22, 0x20, 0x0a, 0x1e, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x71, 0x0a, - 0x18, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, - 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, - 0x65, 0x63, 0x74, 0x12, 0x3b, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x73, 0x70, 0x65, - 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, - 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, - 0x22, 0x4b, 0x0a, 0x19, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2e, 0x0a, - 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x22, 0x46, 0x0a, - 0x16, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, - 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x49, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x2e, 0x0a, 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x05, 0x74, 0x61, 0x62, 0x6c, 0x65, - 0x22, 0x90, 0x02, 0x0a, 0x18, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x43, 0x0a, - 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, - 0x65, 0x72, 0x1a, 0xae, 0x01, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x18, 0x0a, - 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x4f, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0x4d, 0x0a, 0x19, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x30, 0x0a, 0x06, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x06, 0x74, 0x61, 0x62, 0x6c, - 0x65, 0x73, 0x22, 0x49, 0x0a, 0x19, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x09, - 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x1c, 0x0a, - 0x1a, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, - 0x62, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xd9, 0x09, 0x0a, 0x0b, - 0x43, 0x6f, 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x47, - 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, - 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x48, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, - 0x12, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, - 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, - 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x1f, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x4b, 0x0a, 0x0a, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x12, 0x1d, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, - 0x0b, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x1e, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, - 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x1f, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x4e, 
0x0a, 0x0b, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, - 0x1e, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x54, 0x0a, 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x12, 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, - 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, - 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x51, 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x12, - 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, - 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x25, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, - 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5a, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x63, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x25, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x59, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x10, 0x43, 0x6f, 0x72, - 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, - 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, - 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_CoreService_proto_rawDescOnce sync.Once - file_feast_core_CoreService_proto_rawDescData = file_feast_core_CoreService_proto_rawDesc -) - -func file_feast_core_CoreService_proto_rawDescGZIP() []byte { - file_feast_core_CoreService_proto_rawDescOnce.Do(func() { - file_feast_core_CoreService_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_CoreService_proto_rawDescData) - }) - return file_feast_core_CoreService_proto_rawDescData -} - -var file_feast_core_CoreService_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_CoreService_proto_msgTypes = make([]protoimpl.MessageInfo, 37) -var file_feast_core_CoreService_proto_goTypes = []interface{}{ - (UpdateStoreResponse_Status)(0), // 0: feast.core.UpdateStoreResponse.Status - (*GetEntityRequest)(nil), // 1: feast.core.GetEntityRequest - (*GetEntityResponse)(nil), // 2: feast.core.GetEntityResponse - (*ListEntitiesRequest)(nil), // 3: feast.core.ListEntitiesRequest - (*ListEntitiesResponse)(nil), // 4: feast.core.ListEntitiesResponse - (*ListFeaturesRequest)(nil), // 5: feast.core.ListFeaturesRequest - (*ListFeaturesResponse)(nil), // 6: feast.core.ListFeaturesResponse - (*ListStoresRequest)(nil), // 7: feast.core.ListStoresRequest - (*ListStoresResponse)(nil), // 8: feast.core.ListStoresResponse - (*ApplyEntityRequest)(nil), // 9: feast.core.ApplyEntityRequest - (*ApplyEntityResponse)(nil), // 10: feast.core.ApplyEntityResponse - (*GetFeastCoreVersionRequest)(nil), // 11: feast.core.GetFeastCoreVersionRequest - (*GetFeastCoreVersionResponse)(nil), // 12: feast.core.GetFeastCoreVersionResponse - (*UpdateStoreRequest)(nil), // 13: feast.core.UpdateStoreRequest - (*UpdateStoreResponse)(nil), // 14: feast.core.UpdateStoreResponse - (*CreateProjectRequest)(nil), // 15: feast.core.CreateProjectRequest - (*CreateProjectResponse)(nil), // 16: feast.core.CreateProjectResponse - (*ArchiveProjectRequest)(nil), // 17: feast.core.ArchiveProjectRequest - (*ArchiveProjectResponse)(nil), // 18: feast.core.ArchiveProjectResponse - 
(*ListProjectsRequest)(nil), // 19: feast.core.ListProjectsRequest - (*ListProjectsResponse)(nil), // 20: feast.core.ListProjectsResponse - (*UpdateFeatureSetStatusResponse)(nil), // 21: feast.core.UpdateFeatureSetStatusResponse - (*ApplyFeatureTableRequest)(nil), // 22: feast.core.ApplyFeatureTableRequest - (*ApplyFeatureTableResponse)(nil), // 23: feast.core.ApplyFeatureTableResponse - (*GetFeatureTableRequest)(nil), // 24: feast.core.GetFeatureTableRequest - (*GetFeatureTableResponse)(nil), // 25: feast.core.GetFeatureTableResponse - (*ListFeatureTablesRequest)(nil), // 26: feast.core.ListFeatureTablesRequest - (*ListFeatureTablesResponse)(nil), // 27: feast.core.ListFeatureTablesResponse - (*DeleteFeatureTableRequest)(nil), // 28: feast.core.DeleteFeatureTableRequest - (*DeleteFeatureTableResponse)(nil), // 29: feast.core.DeleteFeatureTableResponse - (*ListEntitiesRequest_Filter)(nil), // 30: feast.core.ListEntitiesRequest.Filter - nil, // 31: feast.core.ListEntitiesRequest.Filter.LabelsEntry - (*ListFeaturesRequest_Filter)(nil), // 32: feast.core.ListFeaturesRequest.Filter - nil, // 33: feast.core.ListFeaturesRequest.Filter.LabelsEntry - nil, // 34: feast.core.ListFeaturesResponse.FeaturesEntry - (*ListStoresRequest_Filter)(nil), // 35: feast.core.ListStoresRequest.Filter - (*ListFeatureTablesRequest_Filter)(nil), // 36: feast.core.ListFeatureTablesRequest.Filter - nil, // 37: feast.core.ListFeatureTablesRequest.Filter.LabelsEntry - (*Entity)(nil), // 38: feast.core.Entity - (*Store)(nil), // 39: feast.core.Store - (*EntitySpecV2)(nil), // 40: feast.core.EntitySpecV2 - (*FeatureTableSpec)(nil), // 41: feast.core.FeatureTableSpec - (*FeatureTable)(nil), // 42: feast.core.FeatureTable - (*FeatureSpecV2)(nil), // 43: feast.core.FeatureSpecV2 -} -var file_feast_core_CoreService_proto_depIdxs = []int32{ - 38, // 0: feast.core.GetEntityResponse.entity:type_name -> feast.core.Entity - 30, // 1: feast.core.ListEntitiesRequest.filter:type_name -> feast.core.ListEntitiesRequest.Filter - 38, // 2: feast.core.ListEntitiesResponse.entities:type_name -> feast.core.Entity - 32, // 3: feast.core.ListFeaturesRequest.filter:type_name -> feast.core.ListFeaturesRequest.Filter - 34, // 4: feast.core.ListFeaturesResponse.features:type_name -> feast.core.ListFeaturesResponse.FeaturesEntry - 35, // 5: feast.core.ListStoresRequest.filter:type_name -> feast.core.ListStoresRequest.Filter - 39, // 6: feast.core.ListStoresResponse.store:type_name -> feast.core.Store - 40, // 7: feast.core.ApplyEntityRequest.spec:type_name -> feast.core.EntitySpecV2 - 38, // 8: feast.core.ApplyEntityResponse.entity:type_name -> feast.core.Entity - 39, // 9: feast.core.UpdateStoreRequest.store:type_name -> feast.core.Store - 39, // 10: feast.core.UpdateStoreResponse.store:type_name -> feast.core.Store - 0, // 11: feast.core.UpdateStoreResponse.status:type_name -> feast.core.UpdateStoreResponse.Status - 41, // 12: feast.core.ApplyFeatureTableRequest.table_spec:type_name -> feast.core.FeatureTableSpec - 42, // 13: feast.core.ApplyFeatureTableResponse.table:type_name -> feast.core.FeatureTable - 42, // 14: feast.core.GetFeatureTableResponse.table:type_name -> feast.core.FeatureTable - 36, // 15: feast.core.ListFeatureTablesRequest.filter:type_name -> feast.core.ListFeatureTablesRequest.Filter - 42, // 16: feast.core.ListFeatureTablesResponse.tables:type_name -> feast.core.FeatureTable - 31, // 17: feast.core.ListEntitiesRequest.Filter.labels:type_name -> feast.core.ListEntitiesRequest.Filter.LabelsEntry - 33, // 18: 
feast.core.ListFeaturesRequest.Filter.labels:type_name -> feast.core.ListFeaturesRequest.Filter.LabelsEntry - 43, // 19: feast.core.ListFeaturesResponse.FeaturesEntry.value:type_name -> feast.core.FeatureSpecV2 - 37, // 20: feast.core.ListFeatureTablesRequest.Filter.labels:type_name -> feast.core.ListFeatureTablesRequest.Filter.LabelsEntry - 11, // 21: feast.core.CoreService.GetFeastCoreVersion:input_type -> feast.core.GetFeastCoreVersionRequest - 1, // 22: feast.core.CoreService.GetEntity:input_type -> feast.core.GetEntityRequest - 5, // 23: feast.core.CoreService.ListFeatures:input_type -> feast.core.ListFeaturesRequest - 7, // 24: feast.core.CoreService.ListStores:input_type -> feast.core.ListStoresRequest - 9, // 25: feast.core.CoreService.ApplyEntity:input_type -> feast.core.ApplyEntityRequest - 3, // 26: feast.core.CoreService.ListEntities:input_type -> feast.core.ListEntitiesRequest - 13, // 27: feast.core.CoreService.UpdateStore:input_type -> feast.core.UpdateStoreRequest - 15, // 28: feast.core.CoreService.CreateProject:input_type -> feast.core.CreateProjectRequest - 17, // 29: feast.core.CoreService.ArchiveProject:input_type -> feast.core.ArchiveProjectRequest - 19, // 30: feast.core.CoreService.ListProjects:input_type -> feast.core.ListProjectsRequest - 22, // 31: feast.core.CoreService.ApplyFeatureTable:input_type -> feast.core.ApplyFeatureTableRequest - 26, // 32: feast.core.CoreService.ListFeatureTables:input_type -> feast.core.ListFeatureTablesRequest - 24, // 33: feast.core.CoreService.GetFeatureTable:input_type -> feast.core.GetFeatureTableRequest - 28, // 34: feast.core.CoreService.DeleteFeatureTable:input_type -> feast.core.DeleteFeatureTableRequest - 12, // 35: feast.core.CoreService.GetFeastCoreVersion:output_type -> feast.core.GetFeastCoreVersionResponse - 2, // 36: feast.core.CoreService.GetEntity:output_type -> feast.core.GetEntityResponse - 6, // 37: feast.core.CoreService.ListFeatures:output_type -> feast.core.ListFeaturesResponse - 8, // 38: feast.core.CoreService.ListStores:output_type -> feast.core.ListStoresResponse - 10, // 39: feast.core.CoreService.ApplyEntity:output_type -> feast.core.ApplyEntityResponse - 4, // 40: feast.core.CoreService.ListEntities:output_type -> feast.core.ListEntitiesResponse - 14, // 41: feast.core.CoreService.UpdateStore:output_type -> feast.core.UpdateStoreResponse - 16, // 42: feast.core.CoreService.CreateProject:output_type -> feast.core.CreateProjectResponse - 18, // 43: feast.core.CoreService.ArchiveProject:output_type -> feast.core.ArchiveProjectResponse - 20, // 44: feast.core.CoreService.ListProjects:output_type -> feast.core.ListProjectsResponse - 23, // 45: feast.core.CoreService.ApplyFeatureTable:output_type -> feast.core.ApplyFeatureTableResponse - 27, // 46: feast.core.CoreService.ListFeatureTables:output_type -> feast.core.ListFeatureTablesResponse - 25, // 47: feast.core.CoreService.GetFeatureTable:output_type -> feast.core.GetFeatureTableResponse - 29, // 48: feast.core.CoreService.DeleteFeatureTable:output_type -> feast.core.DeleteFeatureTableResponse - 35, // [35:49] is the sub-list for method output_type - 21, // [21:35] is the sub-list for method input_type - 21, // [21:21] is the sub-list for extension type_name - 21, // [21:21] is the sub-list for extension extendee - 0, // [0:21] is the sub-list for field type_name -} - -func init() { file_feast_core_CoreService_proto_init() } -func file_feast_core_CoreService_proto_init() { - if File_feast_core_CoreService_proto != nil { - return - } - 
file_feast_core_Entity_proto_init() - file_feast_core_Feature_proto_init() - file_feast_core_FeatureTable_proto_init() - file_feast_core_Store_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_core_CoreService_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetEntityRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetEntityResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListEntitiesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListEntitiesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeaturesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeaturesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListStoresRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListStoresResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ApplyEntityRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ApplyEntityResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeastCoreVersionRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeastCoreVersionResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[12].Exporter = func(v interface{}, i int) 
interface{} { - switch v := v.(*UpdateStoreRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateStoreResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateProjectRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateProjectResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveProjectRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ArchiveProjectResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListProjectsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListProjectsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateFeatureSetStatusResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ApplyFeatureTableRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ApplyFeatureTableResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeatureTableRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeatureTableResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_feast_core_CoreService_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeatureTablesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeatureTablesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteFeatureTableRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteFeatureTableResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListEntitiesRequest_Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeaturesRequest_Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListStoresRequest_Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_CoreService_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListFeatureTablesRequest_Filter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_CoreService_proto_rawDesc, - NumEnums: 1, - NumMessages: 37, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_feast_core_CoreService_proto_goTypes, - DependencyIndexes: file_feast_core_CoreService_proto_depIdxs, - EnumInfos: file_feast_core_CoreService_proto_enumTypes, - MessageInfos: file_feast_core_CoreService_proto_msgTypes, - }.Build() - File_feast_core_CoreService_proto = out.File - file_feast_core_CoreService_proto_rawDesc = nil - file_feast_core_CoreService_proto_goTypes = nil - file_feast_core_CoreService_proto_depIdxs = nil -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// CoreServiceClient is the client API for CoreService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
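For readers tracking what this deletion removes: the generated CoreServiceClient defined below was the Go entry point to Feast Core. As a minimal sketch of how such a client was typically constructed and called — the endpoint address, timeout, and printed output are illustrative assumptions, not taken from this diff:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"google.golang.org/grpc"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

func main() {
	// Assumed Core address; adjust for your deployment.
	conn, err := grpc.Dial("localhost:6565", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial feast core: %v", err)
	}
	defer conn.Close()

	client := core.NewCoreServiceClient(conn)

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Each generated method is a plain unary RPC wrapper around
	// ClientConn.Invoke, as the implementations later in this file show.
	resp, err := client.GetFeastCoreVersion(ctx, &core.GetFeastCoreVersionRequest{})
	if err != nil {
		log.Fatalf("GetFeastCoreVersion: %v", err)
	}
	fmt.Println("core version response:", resp)
}
```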
-type CoreServiceClient interface {
-	// Retrieve version information about this Feast deployment
-	GetFeastCoreVersion(ctx context.Context, in *GetFeastCoreVersionRequest, opts ...grpc.CallOption) (*GetFeastCoreVersionResponse, error)
-	// Returns a specific entity
-	GetEntity(ctx context.Context, in *GetEntityRequest, opts ...grpc.CallOption) (*GetEntityResponse, error)
-	// Returns all feature references and respective features matching that filter. If none are found,
-	// an empty map will be returned.
-	// If no filter is provided in the request, the response will contain all the features
-	// currently stored in the default project.
-	ListFeatures(ctx context.Context, in *ListFeaturesRequest, opts ...grpc.CallOption) (*ListFeaturesResponse, error)
-	// Retrieve store details given a filter.
-	//
-	// Returns all stores matching that filter. If none are found, an empty list will be returned.
-	// If no filter is provided in the request, the response will contain all the stores currently
-	// stored in the registry.
-	ListStores(ctx context.Context, in *ListStoresRequest, opts ...grpc.CallOption) (*ListStoresResponse, error)
-	// Create or update an existing entity.
-	//
-	// This function is idempotent - it will not create a new entity if the schema does not change.
-	// Schema changes will update the entity if the changes are valid.
-	// The following changes are not valid:
-	// - Changes to name
-	// - Changes to type
-	ApplyEntity(ctx context.Context, in *ApplyEntityRequest, opts ...grpc.CallOption) (*ApplyEntityResponse, error)
-	// Returns all entity references and respective entities matching that filter. If none are found,
-	// an empty map will be returned.
-	// If no filter is provided in the request, the response will contain all the entities
-	// currently stored in the default project.
-	ListEntities(ctx context.Context, in *ListEntitiesRequest, opts ...grpc.CallOption) (*ListEntitiesResponse, error)
-	// Updates core with the configuration of the store.
-	//
-	// If the changes are valid, core will return the given store configuration in response, and
-	// start or update the necessary feature population jobs for the updated store.
-	UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error)
-	// Creates a project. Projects serve as namespaces within which resources like features will be
-	// created. Feature table names must be unique within a project, while field (Feature/Entity) names
-	// must be unique within a Feature Table. Project names themselves must be globally unique.
-	CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*CreateProjectResponse, error)
-	// Archives a project. Archived projects will continue to exist and function, but won't be visible
-	// through the Core API. Any existing ingestion or serving requests will continue to function,
-	// but will result in warning messages being logged. It is not possible to unarchive a project
-	// through the Core API.
-	ArchiveProject(ctx context.Context, in *ArchiveProjectRequest, opts ...grpc.CallOption) (*ArchiveProjectResponse, error)
-	// Lists all active projects.
-	ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error)
-	// Create or update an existing feature table.
-	// This function is idempotent - it will not create a new feature table if the schema does not change.
-	// Schema changes will update the feature table if the changes are valid.
- // All changes except the following are valid: - // - Changes to feature table name. - // - Changes to entities - // - Changes to feature name and type - ApplyFeatureTable(ctx context.Context, in *ApplyFeatureTableRequest, opts ...grpc.CallOption) (*ApplyFeatureTableResponse, error) - // List feature tables that match a given filter. - // Returns the references of the Feature Tables matching that filter. If none are found, - // an empty list will be returned. - // If no filter is provided in the request, the response will match all the feature - // tables currently stored in the registry. - ListFeatureTables(ctx context.Context, in *ListFeatureTablesRequest, opts ...grpc.CallOption) (*ListFeatureTablesResponse, error) - // Returns a specific feature table - GetFeatureTable(ctx context.Context, in *GetFeatureTableRequest, opts ...grpc.CallOption) (*GetFeatureTableResponse, error) - // Delete a specific feature table - DeleteFeatureTable(ctx context.Context, in *DeleteFeatureTableRequest, opts ...grpc.CallOption) (*DeleteFeatureTableResponse, error) -} - -type coreServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewCoreServiceClient(cc grpc.ClientConnInterface) CoreServiceClient { - return &coreServiceClient{cc} -} - -func (c *coreServiceClient) GetFeastCoreVersion(ctx context.Context, in *GetFeastCoreVersionRequest, opts ...grpc.CallOption) (*GetFeastCoreVersionResponse, error) { - out := new(GetFeastCoreVersionResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetFeastCoreVersion", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) GetEntity(ctx context.Context, in *GetEntityRequest, opts ...grpc.CallOption) (*GetEntityResponse, error) { - out := new(GetEntityResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetEntity", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListFeatures(ctx context.Context, in *ListFeaturesRequest, opts ...grpc.CallOption) (*ListFeaturesResponse, error) { - out := new(ListFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListFeatures", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListStores(ctx context.Context, in *ListStoresRequest, opts ...grpc.CallOption) (*ListStoresResponse, error) { - out := new(ListStoresResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListStores", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ApplyEntity(ctx context.Context, in *ApplyEntityRequest, opts ...grpc.CallOption) (*ApplyEntityResponse, error) { - out := new(ApplyEntityResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyEntity", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListEntities(ctx context.Context, in *ListEntitiesRequest, opts ...grpc.CallOption) (*ListEntitiesResponse, error) { - out := new(ListEntitiesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListEntities", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error) { - out := new(UpdateStoreResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/UpdateStore", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*CreateProjectResponse, error) { - out := new(CreateProjectResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/CreateProject", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ArchiveProject(ctx context.Context, in *ArchiveProjectRequest, opts ...grpc.CallOption) (*ArchiveProjectResponse, error) { - out := new(ArchiveProjectResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ArchiveProject", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error) { - out := new(ListProjectsResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListProjects", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ApplyFeatureTable(ctx context.Context, in *ApplyFeatureTableRequest, opts ...grpc.CallOption) (*ApplyFeatureTableResponse, error) { - out := new(ApplyFeatureTableResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ApplyFeatureTable", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) ListFeatureTables(ctx context.Context, in *ListFeatureTablesRequest, opts ...grpc.CallOption) (*ListFeatureTablesResponse, error) { - out := new(ListFeatureTablesResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListFeatureTables", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) GetFeatureTable(ctx context.Context, in *GetFeatureTableRequest, opts ...grpc.CallOption) (*GetFeatureTableResponse, error) { - out := new(GetFeatureTableResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/GetFeatureTable", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *coreServiceClient) DeleteFeatureTable(ctx context.Context, in *DeleteFeatureTableRequest, opts ...grpc.CallOption) (*DeleteFeatureTableResponse, error) { - out := new(DeleteFeatureTableResponse) - err := c.cc.Invoke(ctx, "/feast.core.CoreService/DeleteFeatureTable", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// CoreServiceServer is the server API for CoreService service. -type CoreServiceServer interface { - // Retrieve version information about this Feast deployment - GetFeastCoreVersion(context.Context, *GetFeastCoreVersionRequest) (*GetFeastCoreVersionResponse, error) - // Returns a specific entity - GetEntity(context.Context, *GetEntityRequest) (*GetEntityResponse, error) - // Returns all feature references and respective features matching that filter. If none are found - // an empty map will be returned - // If no filter is provided in the request, the response will contain all the features - // currently stored in the default project. - ListFeatures(context.Context, *ListFeaturesRequest) (*ListFeaturesResponse, error) - // Retrieve store details given a filter. - // - // Returns all stores matching that filter. If none are found, an empty list will be returned. - // If no filter is provided in the request, the response will contain all the stores currently - // stored in the registry. 
-	ListStores(context.Context, *ListStoresRequest) (*ListStoresResponse, error)
-	// Create or update an existing entity.
-	//
-	// This function is idempotent - it will not create a new entity if the schema does not change.
-	// Schema changes will update the entity if the changes are valid.
-	// The following changes are not valid:
-	// - Changes to name
-	// - Changes to type
-	ApplyEntity(context.Context, *ApplyEntityRequest) (*ApplyEntityResponse, error)
-	// Returns all entity references and respective entities matching that filter. If none are found,
-	// an empty map will be returned.
-	// If no filter is provided in the request, the response will contain all the entities
-	// currently stored in the default project.
-	ListEntities(context.Context, *ListEntitiesRequest) (*ListEntitiesResponse, error)
-	// Updates core with the configuration of the store.
-	//
-	// If the changes are valid, core will return the given store configuration in response, and
-	// start or update the necessary feature population jobs for the updated store.
-	UpdateStore(context.Context, *UpdateStoreRequest) (*UpdateStoreResponse, error)
-	// Creates a project. Projects serve as namespaces within which resources like features will be
-	// created. Feature table names must be unique within a project, while field (Feature/Entity) names
-	// must be unique within a Feature Table. Project names themselves must be globally unique.
-	CreateProject(context.Context, *CreateProjectRequest) (*CreateProjectResponse, error)
-	// Archives a project. Archived projects will continue to exist and function, but won't be visible
-	// through the Core API. Any existing ingestion or serving requests will continue to function,
-	// but will result in warning messages being logged. It is not possible to unarchive a project
-	// through the Core API.
-	ArchiveProject(context.Context, *ArchiveProjectRequest) (*ArchiveProjectResponse, error)
-	// Lists all active projects.
-	ListProjects(context.Context, *ListProjectsRequest) (*ListProjectsResponse, error)
-	// Create or update an existing feature table.
-	// This function is idempotent - it will not create a new feature table if the schema does not change.
-	// Schema changes will update the feature table if the changes are valid.
-	// All changes except the following are valid:
-	// - Changes to feature table name.
-	// - Changes to entities
-	// - Changes to feature name and type
-	ApplyFeatureTable(context.Context, *ApplyFeatureTableRequest) (*ApplyFeatureTableResponse, error)
-	// List feature tables that match a given filter.
-	// Returns the references of the Feature Tables matching that filter. If none are found,
-	// an empty list will be returned.
-	// If no filter is provided in the request, the response will match all the feature
-	// tables currently stored in the registry.
-	ListFeatureTables(context.Context, *ListFeatureTablesRequest) (*ListFeatureTablesResponse, error)
-	// Returns a specific feature table
-	GetFeatureTable(context.Context, *GetFeatureTableRequest) (*GetFeatureTableResponse, error)
-	// Delete a specific feature table
-	DeleteFeatureTable(context.Context, *DeleteFeatureTableRequest) (*DeleteFeatureTableResponse, error)
-}
-
-// UnimplementedCoreServiceServer can be embedded to have forward compatible implementations.
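The embedding pattern referenced in that last comment is worth spelling out: a server type embeds the UnimplementedCoreServiceServer struct defined just below, overrides only the RPCs it supports, and keeps compiling when new methods are added to the service. A hedged sketch — the coreServer type, port, and method body are invented for illustration; only the generated names (UnimplementedCoreServiceServer, RegisterCoreServiceServer, the request/response types) come from this file:

```go
package main

import (
	"context"
	"net"

	"google.golang.org/grpc"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// coreServer is a hypothetical partial implementation. Every RPC it
// does not override falls through to the embedded
// UnimplementedCoreServiceServer and returns codes.Unimplemented.
type coreServer struct {
	core.UnimplementedCoreServiceServer
}

func (s *coreServer) GetFeastCoreVersion(ctx context.Context, req *core.GetFeastCoreVersionRequest) (*core.GetFeastCoreVersionResponse, error) {
	// Illustrative body; a real server would report its build version here.
	return &core.GetFeastCoreVersionResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":6565") // port is an assumption
	if err != nil {
		panic(err)
	}
	s := grpc.NewServer()
	core.RegisterCoreServiceServer(s, &coreServer{})
	if err := s.Serve(lis); err != nil {
		panic(err)
	}
}
```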
-type UnimplementedCoreServiceServer struct { -} - -func (*UnimplementedCoreServiceServer) GetFeastCoreVersion(context.Context, *GetFeastCoreVersionRequest) (*GetFeastCoreVersionResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFeastCoreVersion not implemented") -} -func (*UnimplementedCoreServiceServer) GetEntity(context.Context, *GetEntityRequest) (*GetEntityResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetEntity not implemented") -} -func (*UnimplementedCoreServiceServer) ListFeatures(context.Context, *ListFeaturesRequest) (*ListFeaturesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListFeatures not implemented") -} -func (*UnimplementedCoreServiceServer) ListStores(context.Context, *ListStoresRequest) (*ListStoresResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListStores not implemented") -} -func (*UnimplementedCoreServiceServer) ApplyEntity(context.Context, *ApplyEntityRequest) (*ApplyEntityResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ApplyEntity not implemented") -} -func (*UnimplementedCoreServiceServer) ListEntities(context.Context, *ListEntitiesRequest) (*ListEntitiesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListEntities not implemented") -} -func (*UnimplementedCoreServiceServer) UpdateStore(context.Context, *UpdateStoreRequest) (*UpdateStoreResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdateStore not implemented") -} -func (*UnimplementedCoreServiceServer) CreateProject(context.Context, *CreateProjectRequest) (*CreateProjectResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateProject not implemented") -} -func (*UnimplementedCoreServiceServer) ArchiveProject(context.Context, *ArchiveProjectRequest) (*ArchiveProjectResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ArchiveProject not implemented") -} -func (*UnimplementedCoreServiceServer) ListProjects(context.Context, *ListProjectsRequest) (*ListProjectsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListProjects not implemented") -} -func (*UnimplementedCoreServiceServer) ApplyFeatureTable(context.Context, *ApplyFeatureTableRequest) (*ApplyFeatureTableResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ApplyFeatureTable not implemented") -} -func (*UnimplementedCoreServiceServer) ListFeatureTables(context.Context, *ListFeatureTablesRequest) (*ListFeatureTablesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListFeatureTables not implemented") -} -func (*UnimplementedCoreServiceServer) GetFeatureTable(context.Context, *GetFeatureTableRequest) (*GetFeatureTableResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFeatureTable not implemented") -} -func (*UnimplementedCoreServiceServer) DeleteFeatureTable(context.Context, *DeleteFeatureTableRequest) (*DeleteFeatureTableResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteFeatureTable not implemented") -} - -func RegisterCoreServiceServer(s *grpc.Server, srv CoreServiceServer) { - s.RegisterService(&_CoreService_serviceDesc, srv) -} - -func _CoreService_GetFeastCoreVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetFeastCoreVersionRequest) - if err := dec(in); err != nil { - 
return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetFeastCoreVersion(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetFeastCoreVersion", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetFeastCoreVersion(ctx, req.(*GetFeastCoreVersionRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_GetEntity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetEntityRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetEntity(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetEntity", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetEntity(ctx, req.(*GetEntityRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListFeaturesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListFeatures(ctx, req.(*ListFeaturesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListStores_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListStoresRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListStores(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListStores", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListStores(ctx, req.(*ListStoresRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ApplyEntity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ApplyEntityRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ApplyEntity(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ApplyEntity", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ApplyEntity(ctx, req.(*ApplyEntityRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListEntitiesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListEntities(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListEntities", - } - handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListEntities(ctx, req.(*ListEntitiesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_UpdateStore_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateStoreRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).UpdateStore(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/UpdateStore", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).UpdateStore(ctx, req.(*UpdateStoreRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_CreateProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateProjectRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).CreateProject(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/CreateProject", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).CreateProject(ctx, req.(*CreateProjectRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ArchiveProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ArchiveProjectRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ArchiveProject(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ArchiveProject", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ArchiveProject(ctx, req.(*ArchiveProjectRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ListProjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListProjectsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListProjects(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListProjects", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListProjects(ctx, req.(*ListProjectsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_ApplyFeatureTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ApplyFeatureTableRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ApplyFeatureTable(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ApplyFeatureTable", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ApplyFeatureTable(ctx, req.(*ApplyFeatureTableRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func 
_CoreService_ListFeatureTables_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListFeatureTablesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).ListFeatureTables(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/ListFeatureTables", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).ListFeatureTables(ctx, req.(*ListFeatureTablesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_GetFeatureTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetFeatureTableRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).GetFeatureTable(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/GetFeatureTable", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).GetFeatureTable(ctx, req.(*GetFeatureTableRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _CoreService_DeleteFeatureTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteFeatureTableRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(CoreServiceServer).DeleteFeatureTable(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.CoreService/DeleteFeatureTable", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(CoreServiceServer).DeleteFeatureTable(ctx, req.(*DeleteFeatureTableRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _CoreService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.CoreService", - HandlerType: (*CoreServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetFeastCoreVersion", - Handler: _CoreService_GetFeastCoreVersion_Handler, - }, - { - MethodName: "GetEntity", - Handler: _CoreService_GetEntity_Handler, - }, - { - MethodName: "ListFeatures", - Handler: _CoreService_ListFeatures_Handler, - }, - { - MethodName: "ListStores", - Handler: _CoreService_ListStores_Handler, - }, - { - MethodName: "ApplyEntity", - Handler: _CoreService_ApplyEntity_Handler, - }, - { - MethodName: "ListEntities", - Handler: _CoreService_ListEntities_Handler, - }, - { - MethodName: "UpdateStore", - Handler: _CoreService_UpdateStore_Handler, - }, - { - MethodName: "CreateProject", - Handler: _CoreService_CreateProject_Handler, - }, - { - MethodName: "ArchiveProject", - Handler: _CoreService_ArchiveProject_Handler, - }, - { - MethodName: "ListProjects", - Handler: _CoreService_ListProjects_Handler, - }, - { - MethodName: "ApplyFeatureTable", - Handler: _CoreService_ApplyFeatureTable_Handler, - }, - { - MethodName: "ListFeatureTables", - Handler: _CoreService_ListFeatureTables_Handler, - }, - { - MethodName: "GetFeatureTable", - Handler: _CoreService_GetFeatureTable_Handler, - }, - { - MethodName: "DeleteFeatureTable", - Handler: _CoreService_DeleteFeatureTable_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/CoreService.proto", -} diff 
--git a/sdk/go/protos/feast/core/DataFormat.pb.go b/sdk/go/protos/feast/core/DataFormat.pb.go deleted file mode 100644 index 4a766cc8f9..0000000000 --- a/sdk/go/protos/feast/core/DataFormat.pb.go +++ /dev/null @@ -1,494 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/DataFormat.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// Defines the file format encoding the features/entity data in files -type FileFormat struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Format: - // *FileFormat_ParquetFormat_ - Format isFileFormat_Format `protobuf_oneof:"format"` -} - -func (x *FileFormat) Reset() { - *x = FileFormat{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataFormat_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FileFormat) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FileFormat) ProtoMessage() {} - -func (x *FileFormat) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataFormat_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FileFormat.ProtoReflect.Descriptor instead. 
-func (*FileFormat) Descriptor() ([]byte, []int) { - return file_feast_core_DataFormat_proto_rawDescGZIP(), []int{0} -} - -func (m *FileFormat) GetFormat() isFileFormat_Format { - if m != nil { - return m.Format - } - return nil -} - -func (x *FileFormat) GetParquetFormat() *FileFormat_ParquetFormat { - if x, ok := x.GetFormat().(*FileFormat_ParquetFormat_); ok { - return x.ParquetFormat - } - return nil -} - -type isFileFormat_Format interface { - isFileFormat_Format() -} - -type FileFormat_ParquetFormat_ struct { - ParquetFormat *FileFormat_ParquetFormat `protobuf:"bytes,1,opt,name=parquet_format,json=parquetFormat,proto3,oneof"` -} - -func (*FileFormat_ParquetFormat_) isFileFormat_Format() {} - -// Defines the data format encoding features/entity data in data streams -type StreamFormat struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Specifies the data format and format specific options - // - // Types that are assignable to Format: - // *StreamFormat_AvroFormat_ - // *StreamFormat_ProtoFormat_ - Format isStreamFormat_Format `protobuf_oneof:"format"` -} - -func (x *StreamFormat) Reset() { - *x = StreamFormat{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataFormat_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StreamFormat) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StreamFormat) ProtoMessage() {} - -func (x *StreamFormat) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataFormat_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StreamFormat.ProtoReflect.Descriptor instead. 
-func (*StreamFormat) Descriptor() ([]byte, []int) { - return file_feast_core_DataFormat_proto_rawDescGZIP(), []int{1} -} - -func (m *StreamFormat) GetFormat() isStreamFormat_Format { - if m != nil { - return m.Format - } - return nil -} - -func (x *StreamFormat) GetAvroFormat() *StreamFormat_AvroFormat { - if x, ok := x.GetFormat().(*StreamFormat_AvroFormat_); ok { - return x.AvroFormat - } - return nil -} - -func (x *StreamFormat) GetProtoFormat() *StreamFormat_ProtoFormat { - if x, ok := x.GetFormat().(*StreamFormat_ProtoFormat_); ok { - return x.ProtoFormat - } - return nil -} - -type isStreamFormat_Format interface { - isStreamFormat_Format() -} - -type StreamFormat_AvroFormat_ struct { - AvroFormat *StreamFormat_AvroFormat `protobuf:"bytes,1,opt,name=avro_format,json=avroFormat,proto3,oneof"` -} - -type StreamFormat_ProtoFormat_ struct { - ProtoFormat *StreamFormat_ProtoFormat `protobuf:"bytes,2,opt,name=proto_format,json=protoFormat,proto3,oneof"` -} - -func (*StreamFormat_AvroFormat_) isStreamFormat_Format() {} - -func (*StreamFormat_ProtoFormat_) isStreamFormat_Format() {} - -// Defines options for the Parquet data format -type FileFormat_ParquetFormat struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *FileFormat_ParquetFormat) Reset() { - *x = FileFormat_ParquetFormat{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataFormat_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FileFormat_ParquetFormat) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FileFormat_ParquetFormat) ProtoMessage() {} - -func (x *FileFormat_ParquetFormat) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataFormat_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FileFormat_ParquetFormat.ProtoReflect.Descriptor instead. -func (*FileFormat_ParquetFormat) Descriptor() ([]byte, []int) { - return file_feast_core_DataFormat_proto_rawDescGZIP(), []int{0, 0} -} - -// Defines options for the protobuf data format -type StreamFormat_ProtoFormat struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Classpath to the generated Java Protobuf class that can be used to decode - // Feature data from the obtained stream message - ClassPath string `protobuf:"bytes,1,opt,name=class_path,json=classPath,proto3" json:"class_path,omitempty"` -} - -func (x *StreamFormat_ProtoFormat) Reset() { - *x = StreamFormat_ProtoFormat{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataFormat_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StreamFormat_ProtoFormat) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StreamFormat_ProtoFormat) ProtoMessage() {} - -func (x *StreamFormat_ProtoFormat) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataFormat_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StreamFormat_ProtoFormat.ProtoReflect.Descriptor instead. 
-func (*StreamFormat_ProtoFormat) Descriptor() ([]byte, []int) { - return file_feast_core_DataFormat_proto_rawDescGZIP(), []int{1, 0} -} - -func (x *StreamFormat_ProtoFormat) GetClassPath() string { - if x != nil { - return x.ClassPath - } - return "" -} - -// Defines options for the avro data format -type StreamFormat_AvroFormat struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Optional if used in a File DataSource as schema is embedded in avro file. - // Specifies the schema of the Avro message as JSON string. - SchemaJson string `protobuf:"bytes,1,opt,name=schema_json,json=schemaJson,proto3" json:"schema_json,omitempty"` -} - -func (x *StreamFormat_AvroFormat) Reset() { - *x = StreamFormat_AvroFormat{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataFormat_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StreamFormat_AvroFormat) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StreamFormat_AvroFormat) ProtoMessage() {} - -func (x *StreamFormat_AvroFormat) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataFormat_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StreamFormat_AvroFormat.ProtoReflect.Descriptor instead. -func (*StreamFormat_AvroFormat) Descriptor() ([]byte, []int) { - return file_feast_core_DataFormat_proto_rawDescGZIP(), []int{1, 1} -} - -func (x *StreamFormat_AvroFormat) GetSchemaJson() string { - if x != nil { - return x.SchemaJson - } - return "" -} - -var File_feast_core_DataFormat_proto protoreflect.FileDescriptor - -var file_feast_core_DataFormat_proto_rawDesc = []byte{ - 0x0a, 0x1b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x44, 0x61, 0x74, - 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x76, 0x0a, 0x0a, 0x46, 0x69, 0x6c, - 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x4d, 0x0a, 0x0e, 0x70, 0x61, 0x72, 0x71, 0x75, - 0x65, 0x74, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x69, 0x6c, - 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x61, 0x72, 0x71, 0x75, 0x65, 0x74, 0x46, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x61, 0x72, 0x71, 0x75, 0x65, 0x74, - 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x1a, 0x0f, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x71, 0x75, 0x65, - 0x74, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x42, 0x08, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x22, 0x88, 0x02, 0x0a, 0x0c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x46, 0x6f, 0x72, 0x6d, - 0x61, 0x74, 0x12, 0x46, 0x0a, 0x0b, 0x61, 0x76, 0x72, 0x6f, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x46, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x2e, 0x41, 0x76, 0x72, 0x6f, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x48, 0x00, 0x52, 0x0a, - 0x61, 0x76, 0x72, 0x6f, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x49, 0x0a, 0x0c, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 
0x20, 0x01, 0x28, 0x0b, - 0x32, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, - 0x72, 0x65, 0x61, 0x6d, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, - 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x48, 0x00, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x46, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x1a, 0x2c, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x46, 0x6f, - 0x72, 0x6d, 0x61, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x70, 0x61, - 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x50, - 0x61, 0x74, 0x68, 0x1a, 0x2d, 0x0a, 0x0a, 0x41, 0x76, 0x72, 0x6f, 0x46, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4a, 0x73, - 0x6f, 0x6e, 0x42, 0x08, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x42, 0x58, 0x0a, 0x10, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x42, 0x0f, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x50, 0x72, 0x6f, 0x74, - 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, - 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_DataFormat_proto_rawDescOnce sync.Once - file_feast_core_DataFormat_proto_rawDescData = file_feast_core_DataFormat_proto_rawDesc -) - -func file_feast_core_DataFormat_proto_rawDescGZIP() []byte { - file_feast_core_DataFormat_proto_rawDescOnce.Do(func() { - file_feast_core_DataFormat_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_DataFormat_proto_rawDescData) - }) - return file_feast_core_DataFormat_proto_rawDescData -} - -var file_feast_core_DataFormat_proto_msgTypes = make([]protoimpl.MessageInfo, 5) -var file_feast_core_DataFormat_proto_goTypes = []interface{}{ - (*FileFormat)(nil), // 0: feast.core.FileFormat - (*StreamFormat)(nil), // 1: feast.core.StreamFormat - (*FileFormat_ParquetFormat)(nil), // 2: feast.core.FileFormat.ParquetFormat - (*StreamFormat_ProtoFormat)(nil), // 3: feast.core.StreamFormat.ProtoFormat - (*StreamFormat_AvroFormat)(nil), // 4: feast.core.StreamFormat.AvroFormat -} -var file_feast_core_DataFormat_proto_depIdxs = []int32{ - 2, // 0: feast.core.FileFormat.parquet_format:type_name -> feast.core.FileFormat.ParquetFormat - 4, // 1: feast.core.StreamFormat.avro_format:type_name -> feast.core.StreamFormat.AvroFormat - 3, // 2: feast.core.StreamFormat.proto_format:type_name -> feast.core.StreamFormat.ProtoFormat - 3, // [3:3] is the sub-list for method output_type - 3, // [3:3] is the sub-list for method input_type - 3, // [3:3] is the sub-list for extension type_name - 3, // [3:3] is the sub-list for extension extendee - 0, // [0:3] is the sub-list for field type_name -} - -func init() { file_feast_core_DataFormat_proto_init() } -func file_feast_core_DataFormat_proto_init() { - if File_feast_core_DataFormat_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_DataFormat_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FileFormat); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - 
return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataFormat_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StreamFormat); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataFormat_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FileFormat_ParquetFormat); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataFormat_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StreamFormat_ProtoFormat); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataFormat_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StreamFormat_AvroFormat); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_DataFormat_proto_msgTypes[0].OneofWrappers = []interface{}{ - (*FileFormat_ParquetFormat_)(nil), - } - file_feast_core_DataFormat_proto_msgTypes[1].OneofWrappers = []interface{}{ - (*StreamFormat_AvroFormat_)(nil), - (*StreamFormat_ProtoFormat_)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_DataFormat_proto_rawDesc, - NumEnums: 0, - NumMessages: 5, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_DataFormat_proto_goTypes, - DependencyIndexes: file_feast_core_DataFormat_proto_depIdxs, - MessageInfos: file_feast_core_DataFormat_proto_msgTypes, - }.Build() - File_feast_core_DataFormat_proto = out.File - file_feast_core_DataFormat_proto_rawDesc = nil - file_feast_core_DataFormat_proto_goTypes = nil - file_feast_core_DataFormat_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/DataSource.pb.go b/sdk/go/protos/feast/core/DataSource.pb.go deleted file mode 100644 index 1b43972d32..0000000000 --- a/sdk/go/protos/feast/core/DataSource.pb.go +++ /dev/null @@ -1,734 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/DataSource.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// Type of Data Source. -type DataSource_SourceType int32 - -const ( - DataSource_INVALID DataSource_SourceType = 0 - DataSource_BATCH_FILE DataSource_SourceType = 1 - DataSource_BATCH_BIGQUERY DataSource_SourceType = 2 - DataSource_STREAM_KAFKA DataSource_SourceType = 3 - DataSource_STREAM_KINESIS DataSource_SourceType = 4 -) - -// Enum value maps for DataSource_SourceType. -var ( - DataSource_SourceType_name = map[int32]string{ - 0: "INVALID", - 1: "BATCH_FILE", - 2: "BATCH_BIGQUERY", - 3: "STREAM_KAFKA", - 4: "STREAM_KINESIS", - } - DataSource_SourceType_value = map[string]int32{ - "INVALID": 0, - "BATCH_FILE": 1, - "BATCH_BIGQUERY": 2, - "STREAM_KAFKA": 3, - "STREAM_KINESIS": 4, - } -) - -func (x DataSource_SourceType) Enum() *DataSource_SourceType { - p := new(DataSource_SourceType) - *p = x - return p -} - -func (x DataSource_SourceType) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (DataSource_SourceType) Descriptor() protoreflect.EnumDescriptor { - return file_feast_core_DataSource_proto_enumTypes[0].Descriptor() -} - -func (DataSource_SourceType) Type() protoreflect.EnumType { - return &file_feast_core_DataSource_proto_enumTypes[0] -} - -func (x DataSource_SourceType) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use DataSource_SourceType.Descriptor instead. -func (DataSource_SourceType) EnumDescriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0, 0} -} - -// Defines a Data Source that can be used source Feature data -type DataSource struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Type DataSource_SourceType `protobuf:"varint,1,opt,name=type,proto3,enum=feast.core.DataSource_SourceType" json:"type,omitempty"` - // Defines mapping between fields in the sourced data - // and fields in parent FeatureTable. - FieldMapping map[string]string `protobuf:"bytes,2,rep,name=field_mapping,json=fieldMapping,proto3" json:"field_mapping,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Must specify event timestamp column name - EventTimestampColumn string `protobuf:"bytes,3,opt,name=event_timestamp_column,json=eventTimestampColumn,proto3" json:"event_timestamp_column,omitempty"` - // (Optional) Specify partition column - // useful for file sources - DatePartitionColumn string `protobuf:"bytes,4,opt,name=date_partition_column,json=datePartitionColumn,proto3" json:"date_partition_column,omitempty"` - // Must specify creation timestamp column name - CreatedTimestampColumn string `protobuf:"bytes,5,opt,name=created_timestamp_column,json=createdTimestampColumn,proto3" json:"created_timestamp_column,omitempty"` - // DataSource options. 
- // - // Types that are assignable to Options: - // *DataSource_FileOptions_ - // *DataSource_BigqueryOptions - // *DataSource_KafkaOptions_ - // *DataSource_KinesisOptions_ - Options isDataSource_Options `protobuf_oneof:"options"` -} - -func (x *DataSource) Reset() { - *x = DataSource{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataSource_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataSource) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataSource) ProtoMessage() {} - -func (x *DataSource) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataSource_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataSource.ProtoReflect.Descriptor instead. -func (*DataSource) Descriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0} -} - -func (x *DataSource) GetType() DataSource_SourceType { - if x != nil { - return x.Type - } - return DataSource_INVALID -} - -func (x *DataSource) GetFieldMapping() map[string]string { - if x != nil { - return x.FieldMapping - } - return nil -} - -func (x *DataSource) GetEventTimestampColumn() string { - if x != nil { - return x.EventTimestampColumn - } - return "" -} - -func (x *DataSource) GetDatePartitionColumn() string { - if x != nil { - return x.DatePartitionColumn - } - return "" -} - -func (x *DataSource) GetCreatedTimestampColumn() string { - if x != nil { - return x.CreatedTimestampColumn - } - return "" -} - -func (m *DataSource) GetOptions() isDataSource_Options { - if m != nil { - return m.Options - } - return nil -} - -func (x *DataSource) GetFileOptions() *DataSource_FileOptions { - if x, ok := x.GetOptions().(*DataSource_FileOptions_); ok { - return x.FileOptions - } - return nil -} - -func (x *DataSource) GetBigqueryOptions() *DataSource_BigQueryOptions { - if x, ok := x.GetOptions().(*DataSource_BigqueryOptions); ok { - return x.BigqueryOptions - } - return nil -} - -func (x *DataSource) GetKafkaOptions() *DataSource_KafkaOptions { - if x, ok := x.GetOptions().(*DataSource_KafkaOptions_); ok { - return x.KafkaOptions - } - return nil -} - -func (x *DataSource) GetKinesisOptions() *DataSource_KinesisOptions { - if x, ok := x.GetOptions().(*DataSource_KinesisOptions_); ok { - return x.KinesisOptions - } - return nil -} - -type isDataSource_Options interface { - isDataSource_Options() -} - -type DataSource_FileOptions_ struct { - FileOptions *DataSource_FileOptions `protobuf:"bytes,11,opt,name=file_options,json=fileOptions,proto3,oneof"` -} - -type DataSource_BigqueryOptions struct { - BigqueryOptions *DataSource_BigQueryOptions `protobuf:"bytes,12,opt,name=bigquery_options,json=bigqueryOptions,proto3,oneof"` -} - -type DataSource_KafkaOptions_ struct { - KafkaOptions *DataSource_KafkaOptions `protobuf:"bytes,13,opt,name=kafka_options,json=kafkaOptions,proto3,oneof"` -} - -type DataSource_KinesisOptions_ struct { - KinesisOptions *DataSource_KinesisOptions `protobuf:"bytes,14,opt,name=kinesis_options,json=kinesisOptions,proto3,oneof"` -} - -func (*DataSource_FileOptions_) isDataSource_Options() {} - -func (*DataSource_BigqueryOptions) isDataSource_Options() {} - -func (*DataSource_KafkaOptions_) isDataSource_Options() {} - -func (*DataSource_KinesisOptions_) isDataSource_Options() {} - -// 
Defines options for DataSource that sources features from a file -type DataSource_FileOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - FileFormat *FileFormat `protobuf:"bytes,1,opt,name=file_format,json=fileFormat,proto3" json:"file_format,omitempty"` - // Target URL of file to retrieve and source features from. - // s3://path/to/file for AWS S3 storage - // gs://path/to/file for GCP GCS storage - // file:///path/to/file for local storage - FileUrl string `protobuf:"bytes,2,opt,name=file_url,json=fileUrl,proto3" json:"file_url,omitempty"` -} - -func (x *DataSource_FileOptions) Reset() { - *x = DataSource_FileOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataSource_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataSource_FileOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataSource_FileOptions) ProtoMessage() {} - -func (x *DataSource_FileOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataSource_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataSource_FileOptions.ProtoReflect.Descriptor instead. -func (*DataSource_FileOptions) Descriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0, 1} -} - -func (x *DataSource_FileOptions) GetFileFormat() *FileFormat { - if x != nil { - return x.FileFormat - } - return nil -} - -func (x *DataSource_FileOptions) GetFileUrl() string { - if x != nil { - return x.FileUrl - } - return "" -} - -// Defines options for DataSource that sources features from a BigQuery Query -type DataSource_BigQueryOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Full table reference in the form of [project:dataset.table] - TableRef string `protobuf:"bytes,1,opt,name=table_ref,json=tableRef,proto3" json:"table_ref,omitempty"` -} - -func (x *DataSource_BigQueryOptions) Reset() { - *x = DataSource_BigQueryOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataSource_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataSource_BigQueryOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataSource_BigQueryOptions) ProtoMessage() {} - -func (x *DataSource_BigQueryOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataSource_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataSource_BigQueryOptions.ProtoReflect.Descriptor instead. -func (*DataSource_BigQueryOptions) Descriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0, 2} -} - -func (x *DataSource_BigQueryOptions) GetTableRef() string { - if x != nil { - return x.TableRef - } - return "" -} - -// Defines options for DataSource that sources features from Kafka messages. 
-// Each message should be a Protobuf that can be decoded with the generated -// Java Protobuf class at the given class path -type DataSource_KafkaOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Comma separated list of Kafka bootstrap servers. Used for feature tables without a defined source host[:port]] - BootstrapServers string `protobuf:"bytes,1,opt,name=bootstrap_servers,json=bootstrapServers,proto3" json:"bootstrap_servers,omitempty"` - // Kafka topic to collect feature data from. - Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` - // Defines the stream data format encoding feature/entity data in Kafka messages. - MessageFormat *StreamFormat `protobuf:"bytes,3,opt,name=message_format,json=messageFormat,proto3" json:"message_format,omitempty"` -} - -func (x *DataSource_KafkaOptions) Reset() { - *x = DataSource_KafkaOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataSource_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataSource_KafkaOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataSource_KafkaOptions) ProtoMessage() {} - -func (x *DataSource_KafkaOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataSource_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataSource_KafkaOptions.ProtoReflect.Descriptor instead. -func (*DataSource_KafkaOptions) Descriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0, 3} -} - -func (x *DataSource_KafkaOptions) GetBootstrapServers() string { - if x != nil { - return x.BootstrapServers - } - return "" -} - -func (x *DataSource_KafkaOptions) GetTopic() string { - if x != nil { - return x.Topic - } - return "" -} - -func (x *DataSource_KafkaOptions) GetMessageFormat() *StreamFormat { - if x != nil { - return x.MessageFormat - } - return nil -} - -// Defines options for DataSource that sources features from Kinesis records. -// Each record should be a Protobuf that can be decoded with the generated -// Java Protobuf class at the given class path -type DataSource_KinesisOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // AWS region of the Kinesis stream - Region string `protobuf:"bytes,1,opt,name=region,proto3" json:"region,omitempty"` - // Name of the Kinesis stream to obtain feature data from. - StreamName string `protobuf:"bytes,2,opt,name=stream_name,json=streamName,proto3" json:"stream_name,omitempty"` - // Defines the data format encoding the feature/entity data in Kinesis records. - // Kinesis Data Sources support Avro and Proto as data formats. 
- RecordFormat *StreamFormat `protobuf:"bytes,3,opt,name=record_format,json=recordFormat,proto3" json:"record_format,omitempty"` -} - -func (x *DataSource_KinesisOptions) Reset() { - *x = DataSource_KinesisOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_DataSource_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataSource_KinesisOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataSource_KinesisOptions) ProtoMessage() {} - -func (x *DataSource_KinesisOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_DataSource_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataSource_KinesisOptions.ProtoReflect.Descriptor instead. -func (*DataSource_KinesisOptions) Descriptor() ([]byte, []int) { - return file_feast_core_DataSource_proto_rawDescGZIP(), []int{0, 4} -} - -func (x *DataSource_KinesisOptions) GetRegion() string { - if x != nil { - return x.Region - } - return "" -} - -func (x *DataSource_KinesisOptions) GetStreamName() string { - if x != nil { - return x.StreamName - } - return "" -} - -func (x *DataSource_KinesisOptions) GetRecordFormat() *StreamFormat { - if x != nil { - return x.RecordFormat - } - return nil -} - -var File_feast_core_DataSource_proto protoreflect.FileDescriptor - -var file_feast_core_DataSource_proto_rawDesc = []byte{ - 0x0a, 0x1b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x44, 0x61, 0x74, - 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x1b, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd6, 0x09, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x4d, 0x0a, 0x0d, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x34, 0x0a, 0x16, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x63, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x43, 0x6f, 0x6c, 0x75, 0x6d, - 0x6e, 0x12, 0x32, 0x0a, 0x15, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x13, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 
0x43, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x38, 0x0a, 0x18, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, - 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x16, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, - 0x47, 0x0a, 0x0c, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, - 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x46, 0x69, - 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0b, 0x66, 0x69, 0x6c, - 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x53, 0x0a, 0x10, 0x62, 0x69, 0x67, 0x71, - 0x75, 0x65, 0x72, 0x79, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0c, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x42, 0x69, 0x67, 0x51, 0x75, - 0x65, 0x72, 0x79, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0f, 0x62, 0x69, - 0x67, 0x71, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4a, 0x0a, - 0x0d, 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0d, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4b, 0x61, 0x66, - 0x6b, 0x61, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0c, 0x6b, 0x61, 0x66, - 0x6b, 0x61, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x50, 0x0a, 0x0f, 0x6b, 0x69, 0x6e, - 0x65, 0x73, 0x69, 0x73, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0e, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4b, 0x69, 0x6e, 0x65, 0x73, - 0x69, 0x73, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x0e, 0x6b, 0x69, 0x6e, - 0x65, 0x73, 0x69, 0x73, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3f, 0x0a, 0x11, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x61, 0x0a, 0x0b, - 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x66, - 0x69, 0x6c, 0x65, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x69, - 0x6c, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x46, 0x6f, - 0x72, 0x6d, 0x61, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x6c, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, 0x6c, 0x1a, - 0x2e, 0x0a, 0x0f, 0x42, 0x69, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x18, - 0x01, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x08, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x65, 0x66, 0x1a, - 0x92, 0x01, 0x0a, 0x0c, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x12, 0x2b, 0x0a, 0x11, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x5f, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x62, 0x6f, 0x6f, - 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x14, 0x0a, - 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, - 0x70, 0x69, 0x63, 0x12, 0x3f, 0x0a, 0x0e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x66, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x46, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0d, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x46, 0x6f, - 0x72, 0x6d, 0x61, 0x74, 0x1a, 0x88, 0x01, 0x0a, 0x0e, 0x4b, 0x69, 0x6e, 0x65, 0x73, 0x69, 0x73, - 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, - 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x12, - 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4e, 0x61, 0x6d, 0x65, - 0x12, 0x3d, 0x0a, 0x0d, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x46, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x52, 0x0c, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x22, - 0x63, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, - 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x42, 0x41, - 0x54, 0x43, 0x48, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x42, 0x41, - 0x54, 0x43, 0x48, 0x5f, 0x42, 0x49, 0x47, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x02, 0x12, 0x10, - 0x0a, 0x0c, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x5f, 0x4b, 0x41, 0x46, 0x4b, 0x41, 0x10, 0x03, - 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x5f, 0x4b, 0x49, 0x4e, 0x45, 0x53, - 0x49, 0x53, 0x10, 0x04, 0x42, 0x09, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, - 0x58, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x42, 0x0f, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, - 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, -} - -var ( - file_feast_core_DataSource_proto_rawDescOnce sync.Once - file_feast_core_DataSource_proto_rawDescData = file_feast_core_DataSource_proto_rawDesc -) - -func file_feast_core_DataSource_proto_rawDescGZIP() []byte { - file_feast_core_DataSource_proto_rawDescOnce.Do(func() { - file_feast_core_DataSource_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_DataSource_proto_rawDescData) - }) - return 
file_feast_core_DataSource_proto_rawDescData -} - -var file_feast_core_DataSource_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_DataSource_proto_msgTypes = make([]protoimpl.MessageInfo, 6) -var file_feast_core_DataSource_proto_goTypes = []interface{}{ - (DataSource_SourceType)(0), // 0: feast.core.DataSource.SourceType - (*DataSource)(nil), // 1: feast.core.DataSource - nil, // 2: feast.core.DataSource.FieldMappingEntry - (*DataSource_FileOptions)(nil), // 3: feast.core.DataSource.FileOptions - (*DataSource_BigQueryOptions)(nil), // 4: feast.core.DataSource.BigQueryOptions - (*DataSource_KafkaOptions)(nil), // 5: feast.core.DataSource.KafkaOptions - (*DataSource_KinesisOptions)(nil), // 6: feast.core.DataSource.KinesisOptions - (*FileFormat)(nil), // 7: feast.core.FileFormat - (*StreamFormat)(nil), // 8: feast.core.StreamFormat -} -var file_feast_core_DataSource_proto_depIdxs = []int32{ - 0, // 0: feast.core.DataSource.type:type_name -> feast.core.DataSource.SourceType - 2, // 1: feast.core.DataSource.field_mapping:type_name -> feast.core.DataSource.FieldMappingEntry - 3, // 2: feast.core.DataSource.file_options:type_name -> feast.core.DataSource.FileOptions - 4, // 3: feast.core.DataSource.bigquery_options:type_name -> feast.core.DataSource.BigQueryOptions - 5, // 4: feast.core.DataSource.kafka_options:type_name -> feast.core.DataSource.KafkaOptions - 6, // 5: feast.core.DataSource.kinesis_options:type_name -> feast.core.DataSource.KinesisOptions - 7, // 6: feast.core.DataSource.FileOptions.file_format:type_name -> feast.core.FileFormat - 8, // 7: feast.core.DataSource.KafkaOptions.message_format:type_name -> feast.core.StreamFormat - 8, // 8: feast.core.DataSource.KinesisOptions.record_format:type_name -> feast.core.StreamFormat - 9, // [9:9] is the sub-list for method output_type - 9, // [9:9] is the sub-list for method input_type - 9, // [9:9] is the sub-list for extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name -} - -func init() { file_feast_core_DataSource_proto_init() } -func file_feast_core_DataSource_proto_init() { - if File_feast_core_DataSource_proto != nil { - return - } - file_feast_core_DataFormat_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_core_DataSource_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataSource_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource_FileOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataSource_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource_BigQueryOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataSource_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource_KafkaOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_DataSource_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataSource_KinesisOptions); i { - case 0: - return 
&v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_DataSource_proto_msgTypes[0].OneofWrappers = []interface{}{ - (*DataSource_FileOptions_)(nil), - (*DataSource_BigqueryOptions)(nil), - (*DataSource_KafkaOptions_)(nil), - (*DataSource_KinesisOptions_)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_DataSource_proto_rawDesc, - NumEnums: 1, - NumMessages: 6, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_DataSource_proto_goTypes, - DependencyIndexes: file_feast_core_DataSource_proto_depIdxs, - EnumInfos: file_feast_core_DataSource_proto_enumTypes, - MessageInfos: file_feast_core_DataSource_proto_msgTypes, - }.Build() - File_feast_core_DataSource_proto = out.File - file_feast_core_DataSource_proto_rawDesc = nil - file_feast_core_DataSource_proto_goTypes = nil - file_feast_core_DataSource_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/Entity.pb.go b/sdk/go/protos/feast/core/Entity.pb.go deleted file mode 100644 index 0aed913325..0000000000 --- a/sdk/go/protos/feast/core/Entity.pb.go +++ /dev/null @@ -1,379 +0,0 @@ -// -// * Copyright 2020 The Feast Authors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * https://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/Entity.proto - -package core - -import ( - types "github.com/feast-dev/feast/sdk/go/protos/feast/types" - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type Entity struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // User-specified specifications of this entity. - Spec *EntitySpecV2 `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - // System-populated metadata for this entity. 
- Meta *EntityMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"` -} - -func (x *Entity) Reset() { - *x = Entity{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Entity_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Entity) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Entity) ProtoMessage() {} - -func (x *Entity) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Entity_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Entity.ProtoReflect.Descriptor instead. -func (*Entity) Descriptor() ([]byte, []int) { - return file_feast_core_Entity_proto_rawDescGZIP(), []int{0} -} - -func (x *Entity) GetSpec() *EntitySpecV2 { - if x != nil { - return x.Spec - } - return nil -} - -func (x *Entity) GetMeta() *EntityMeta { - if x != nil { - return x.Meta - } - return nil -} - -type EntitySpecV2 struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the entity. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Type of the entity. - ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` - // Description of the entity. - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` - // User defined metadata - Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *EntitySpecV2) Reset() { - *x = EntitySpecV2{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Entity_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EntitySpecV2) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EntitySpecV2) ProtoMessage() {} - -func (x *EntitySpecV2) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Entity_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EntitySpecV2.ProtoReflect.Descriptor instead. 
-func (*EntitySpecV2) Descriptor() ([]byte, []int) { - return file_feast_core_Entity_proto_rawDescGZIP(), []int{1} -} - -func (x *EntitySpecV2) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *EntitySpecV2) GetValueType() types.ValueType_Enum { - if x != nil { - return x.ValueType - } - return types.ValueType_INVALID -} - -func (x *EntitySpecV2) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -func (x *EntitySpecV2) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -type EntityMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=created_timestamp,json=createdTimestamp,proto3" json:"created_timestamp,omitempty"` - LastUpdatedTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_updated_timestamp,json=lastUpdatedTimestamp,proto3" json:"last_updated_timestamp,omitempty"` -} - -func (x *EntityMeta) Reset() { - *x = EntityMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Entity_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EntityMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EntityMeta) ProtoMessage() {} - -func (x *EntityMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Entity_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EntityMeta.ProtoReflect.Descriptor instead. -func (*EntityMeta) Descriptor() ([]byte, []int) { - return file_feast_core_Entity_proto_rawDescGZIP(), []int{2} -} - -func (x *EntityMeta) GetCreatedTimestamp() *timestamp.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -func (x *EntityMeta) GetLastUpdatedTimestamp() *timestamp.Timestamp { - if x != nil { - return x.LastUpdatedTimestamp - } - return nil -} - -var File_feast_core_Entity_proto protoreflect.FileDescriptor - -var file_feast_core_Entity_proto_rawDesc = []byte{ - 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x45, 0x6e, 0x74, - 0x69, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, - 0x65, 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, - 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0x62, 0x0a, 0x06, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x2c, 0x0a, 0x04, 0x73, 0x70, 0x65, - 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x56, - 0x32, 0x52, 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x2a, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, - 0x65, 0x74, 0x61, 0x22, 0xf9, 0x01, 0x0a, 0x0c, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 
- 0x65, 0x63, 0x56, 0x32, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x56, 0x32, - 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0xa7, 0x01, 0x0a, 0x0a, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x47, - 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x50, 0x0a, 0x16, 0x6c, 0x61, 0x73, 0x74, 0x5f, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x14, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x42, 0x54, 0x0a, 0x10, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0b, 0x45, - 0x6e, 0x74, 0x69, 0x74, 0x79, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, - 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_Entity_proto_rawDescOnce sync.Once - file_feast_core_Entity_proto_rawDescData = file_feast_core_Entity_proto_rawDesc -) - -func file_feast_core_Entity_proto_rawDescGZIP() []byte { - file_feast_core_Entity_proto_rawDescOnce.Do(func() { - file_feast_core_Entity_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Entity_proto_rawDescData) - }) - return file_feast_core_Entity_proto_rawDescData -} - -var 
file_feast_core_Entity_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_feast_core_Entity_proto_goTypes = []interface{}{ - (*Entity)(nil), // 0: feast.core.Entity - (*EntitySpecV2)(nil), // 1: feast.core.EntitySpecV2 - (*EntityMeta)(nil), // 2: feast.core.EntityMeta - nil, // 3: feast.core.EntitySpecV2.LabelsEntry - (types.ValueType_Enum)(0), // 4: feast.types.ValueType.Enum - (*timestamp.Timestamp)(nil), // 5: google.protobuf.Timestamp -} -var file_feast_core_Entity_proto_depIdxs = []int32{ - 1, // 0: feast.core.Entity.spec:type_name -> feast.core.EntitySpecV2 - 2, // 1: feast.core.Entity.meta:type_name -> feast.core.EntityMeta - 4, // 2: feast.core.EntitySpecV2.value_type:type_name -> feast.types.ValueType.Enum - 3, // 3: feast.core.EntitySpecV2.labels:type_name -> feast.core.EntitySpecV2.LabelsEntry - 5, // 4: feast.core.EntityMeta.created_timestamp:type_name -> google.protobuf.Timestamp - 5, // 5: feast.core.EntityMeta.last_updated_timestamp:type_name -> google.protobuf.Timestamp - 6, // [6:6] is the sub-list for method output_type - 6, // [6:6] is the sub-list for method input_type - 6, // [6:6] is the sub-list for extension type_name - 6, // [6:6] is the sub-list for extension extendee - 0, // [0:6] is the sub-list for field type_name -} - -func init() { file_feast_core_Entity_proto_init() } -func file_feast_core_Entity_proto_init() { - if File_feast_core_Entity_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_Entity_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Entity); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Entity_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EntitySpecV2); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Entity_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EntityMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_Entity_proto_rawDesc, - NumEnums: 0, - NumMessages: 4, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_Entity_proto_goTypes, - DependencyIndexes: file_feast_core_Entity_proto_depIdxs, - MessageInfos: file_feast_core_Entity_proto_msgTypes, - }.Build() - File_feast_core_Entity_proto = out.File - file_feast_core_Entity_proto_rawDesc = nil - file_feast_core_Entity_proto_goTypes = nil - file_feast_core_Entity_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/Feature.pb.go b/sdk/go/protos/feast/core/Feature.pb.go deleted file mode 100644 index 1ad93ef8a1..0000000000 --- a/sdk/go/protos/feast/core/Feature.pb.go +++ /dev/null @@ -1,205 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/Feature.proto - -package core - -import ( - types "github.com/feast-dev/feast/sdk/go/protos/feast/types" - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type FeatureSpecV2 struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the feature. Not updatable. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Value type of the feature. Not updatable. - ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` - // Labels for user defined metadata on a feature - Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *FeatureSpecV2) Reset() { - *x = FeatureSpecV2{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Feature_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSpecV2) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSpecV2) ProtoMessage() {} - -func (x *FeatureSpecV2) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Feature_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSpecV2.ProtoReflect.Descriptor instead. 
-func (*FeatureSpecV2) Descriptor() ([]byte, []int) { - return file_feast_core_Feature_proto_rawDescGZIP(), []int{0} -} - -func (x *FeatureSpecV2) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *FeatureSpecV2) GetValueType() types.ValueType_Enum { - if x != nil { - return x.ValueType - } - return types.ValueType_INVALID -} - -func (x *FeatureSpecV2) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -var File_feast_core_Feature_proto protoreflect.FileDescriptor - -var file_feast_core_Feature_proto_rawDesc = []byte{ - 0x0a, 0x18, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0xd9, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x56, - 0x32, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x3d, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x25, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x56, 0x32, 0x2e, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x55, 0x0a, 0x10, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, - 0x0c, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, - 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, - 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_Feature_proto_rawDescOnce sync.Once - file_feast_core_Feature_proto_rawDescData = file_feast_core_Feature_proto_rawDesc -) - -func file_feast_core_Feature_proto_rawDescGZIP() []byte { - file_feast_core_Feature_proto_rawDescOnce.Do(func() { - file_feast_core_Feature_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Feature_proto_rawDescData) - }) - return file_feast_core_Feature_proto_rawDescData -} - -var file_feast_core_Feature_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_feast_core_Feature_proto_goTypes = []interface{}{ - (*FeatureSpecV2)(nil), // 0: feast.core.FeatureSpecV2 - nil, 
// 1: feast.core.FeatureSpecV2.LabelsEntry - (types.ValueType_Enum)(0), // 2: feast.types.ValueType.Enum -} -var file_feast_core_Feature_proto_depIdxs = []int32{ - 2, // 0: feast.core.FeatureSpecV2.value_type:type_name -> feast.types.ValueType.Enum - 1, // 1: feast.core.FeatureSpecV2.labels:type_name -> feast.core.FeatureSpecV2.LabelsEntry - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_feast_core_Feature_proto_init() } -func file_feast_core_Feature_proto_init() { - if File_feast_core_Feature_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_Feature_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSpecV2); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_Feature_proto_rawDesc, - NumEnums: 0, - NumMessages: 2, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_Feature_proto_goTypes, - DependencyIndexes: file_feast_core_Feature_proto_depIdxs, - MessageInfos: file_feast_core_Feature_proto_msgTypes, - }.Build() - File_feast_core_Feature_proto = out.File - file_feast_core_Feature_proto_rawDesc = nil - file_feast_core_Feature_proto_goTypes = nil - file_feast_core_Feature_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/FeatureSet.pb.go b/sdk/go/protos/feast/core/FeatureSet.pb.go deleted file mode 100644 index 520c4881be..0000000000 --- a/sdk/go/protos/feast/core/FeatureSet.pb.go +++ /dev/null @@ -1,1141 +0,0 @@ -// -// * Copyright 2019 The Feast Authors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * https://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/core/FeatureSet.proto - -package core - -import ( - types "github.com/feast-dev/feast/sdk/go/protos/feast/types" - v0 "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" - proto "github.com/golang/protobuf/proto" - duration "github.com/golang/protobuf/ptypes/duration" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type FeatureSetStatus int32 - -const ( - FeatureSetStatus_STATUS_INVALID FeatureSetStatus = 0 - FeatureSetStatus_STATUS_PENDING FeatureSetStatus = 1 - FeatureSetStatus_STATUS_JOB_STARTING FeatureSetStatus = 3 - FeatureSetStatus_STATUS_READY FeatureSetStatus = 2 -) - -// Enum value maps for FeatureSetStatus. -var ( - FeatureSetStatus_name = map[int32]string{ - 0: "STATUS_INVALID", - 1: "STATUS_PENDING", - 3: "STATUS_JOB_STARTING", - 2: "STATUS_READY", - } - FeatureSetStatus_value = map[string]int32{ - "STATUS_INVALID": 0, - "STATUS_PENDING": 1, - "STATUS_JOB_STARTING": 3, - "STATUS_READY": 2, - } -) - -func (x FeatureSetStatus) Enum() *FeatureSetStatus { - p := new(FeatureSetStatus) - *p = x - return p -} - -func (x FeatureSetStatus) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (FeatureSetStatus) Descriptor() protoreflect.EnumDescriptor { - return file_feast_core_FeatureSet_proto_enumTypes[0].Descriptor() -} - -func (FeatureSetStatus) Type() protoreflect.EnumType { - return &file_feast_core_FeatureSet_proto_enumTypes[0] -} - -func (x FeatureSetStatus) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use FeatureSetStatus.Descriptor instead. -func (FeatureSetStatus) EnumDescriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{0} -} - -type FeatureSetJobDeliveryStatus int32 - -const ( - FeatureSetJobDeliveryStatus_STATUS_IN_PROGRESS FeatureSetJobDeliveryStatus = 0 - FeatureSetJobDeliveryStatus_STATUS_DELIVERED FeatureSetJobDeliveryStatus = 1 -) - -// Enum value maps for FeatureSetJobDeliveryStatus. -var ( - FeatureSetJobDeliveryStatus_name = map[int32]string{ - 0: "STATUS_IN_PROGRESS", - 1: "STATUS_DELIVERED", - } - FeatureSetJobDeliveryStatus_value = map[string]int32{ - "STATUS_IN_PROGRESS": 0, - "STATUS_DELIVERED": 1, - } -) - -func (x FeatureSetJobDeliveryStatus) Enum() *FeatureSetJobDeliveryStatus { - p := new(FeatureSetJobDeliveryStatus) - *p = x - return p -} - -func (x FeatureSetJobDeliveryStatus) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (FeatureSetJobDeliveryStatus) Descriptor() protoreflect.EnumDescriptor { - return file_feast_core_FeatureSet_proto_enumTypes[1].Descriptor() -} - -func (FeatureSetJobDeliveryStatus) Type() protoreflect.EnumType { - return &file_feast_core_FeatureSet_proto_enumTypes[1] -} - -func (x FeatureSetJobDeliveryStatus) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use FeatureSetJobDeliveryStatus.Descriptor instead. -func (FeatureSetJobDeliveryStatus) EnumDescriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{1} -} - -type FeatureSet struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // User-specified specifications of this feature set. - Spec *FeatureSetSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - // System-populated metadata for this feature set. 
- Meta *FeatureSetMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"` -} - -func (x *FeatureSet) Reset() { - *x = FeatureSet{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSet_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSet) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSet) ProtoMessage() {} - -func (x *FeatureSet) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSet_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSet.ProtoReflect.Descriptor instead. -func (*FeatureSet) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{0} -} - -func (x *FeatureSet) GetSpec() *FeatureSetSpec { - if x != nil { - return x.Spec - } - return nil -} - -func (x *FeatureSet) GetMeta() *FeatureSetMeta { - if x != nil { - return x.Meta - } - return nil -} - -type FeatureSetSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of project that this feature set belongs to. - Project string `protobuf:"bytes,7,opt,name=project,proto3" json:"project,omitempty"` - // Name of the feature set. Must be unique. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // List of entities contained within this featureSet. - // This allows the feature to be used during joins between feature sets. - // If the featureSet is ingested into a store that supports keys, this value - // will be made a key. - Entities []*EntitySpec `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` - // List of features contained within this featureSet. - Features []*FeatureSpec `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` - // Features in this feature set will only be retrieved if they are found - // after [time - max_age]. Missing or older feature values will be returned - // as nulls and indicated to end user - MaxAge *duration.Duration `protobuf:"bytes,5,opt,name=max_age,json=maxAge,proto3" json:"max_age,omitempty"` - // Optional. Source on which feature rows can be found. - // If not set, source will be set to the default value configured in Feast Core. 
- Source *Source `protobuf:"bytes,6,opt,name=source,proto3" json:"source,omitempty"` - // User defined metadata - Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Read-only self-incrementing version that increases monotonically - // when changes are made to a feature set - Version int32 `protobuf:"varint,9,opt,name=version,proto3" json:"version,omitempty"` -} - -func (x *FeatureSetSpec) Reset() { - *x = FeatureSetSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSet_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSetSpec) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSetSpec) ProtoMessage() {} - -func (x *FeatureSetSpec) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSet_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSetSpec.ProtoReflect.Descriptor instead. -func (*FeatureSetSpec) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{1} -} - -func (x *FeatureSetSpec) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *FeatureSetSpec) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *FeatureSetSpec) GetEntities() []*EntitySpec { - if x != nil { - return x.Entities - } - return nil -} - -func (x *FeatureSetSpec) GetFeatures() []*FeatureSpec { - if x != nil { - return x.Features - } - return nil -} - -func (x *FeatureSetSpec) GetMaxAge() *duration.Duration { - if x != nil { - return x.MaxAge - } - return nil -} - -func (x *FeatureSetSpec) GetSource() *Source { - if x != nil { - return x.Source - } - return nil -} - -func (x *FeatureSetSpec) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *FeatureSetSpec) GetVersion() int32 { - if x != nil { - return x.Version - } - return 0 -} - -type EntitySpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the entity. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Value type of the entity. - ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` -} - -func (x *EntitySpec) Reset() { - *x = EntitySpec{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSet_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EntitySpec) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EntitySpec) ProtoMessage() {} - -func (x *EntitySpec) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSet_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EntitySpec.ProtoReflect.Descriptor instead. 
-func (*EntitySpec) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{2} -} - -func (x *EntitySpec) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *EntitySpec) GetValueType() types.ValueType_Enum { - if x != nil { - return x.ValueType - } - return types.ValueType_INVALID -} - -type FeatureSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the feature. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Value type of the feature. - ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` - // Labels for user defined metadata on a feature - Labels map[string]string `protobuf:"bytes,16,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Types that are assignable to PresenceConstraints: - // *FeatureSpec_Presence - // *FeatureSpec_GroupPresence - PresenceConstraints isFeatureSpec_PresenceConstraints `protobuf_oneof:"presence_constraints"` - // The shape of the feature which governs the number of values that appear in - // each example. - // - // Types that are assignable to ShapeType: - // *FeatureSpec_Shape - // *FeatureSpec_ValueCount - ShapeType isFeatureSpec_ShapeType `protobuf_oneof:"shape_type"` - // Domain for the values of the feature. - // - // Types that are assignable to DomainInfo: - // *FeatureSpec_Domain - // *FeatureSpec_IntDomain - // *FeatureSpec_FloatDomain - // *FeatureSpec_StringDomain - // *FeatureSpec_BoolDomain - // *FeatureSpec_StructDomain - // *FeatureSpec_NaturalLanguageDomain - // *FeatureSpec_ImageDomain - // *FeatureSpec_MidDomain - // *FeatureSpec_UrlDomain - // *FeatureSpec_TimeDomain - // *FeatureSpec_TimeOfDayDomain - DomainInfo isFeatureSpec_DomainInfo `protobuf_oneof:"domain_info"` -} - -func (x *FeatureSpec) Reset() { - *x = FeatureSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSet_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSpec) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSpec) ProtoMessage() {} - -func (x *FeatureSpec) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSet_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSpec.ProtoReflect.Descriptor instead. 
-func (*FeatureSpec) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{3} -} - -func (x *FeatureSpec) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *FeatureSpec) GetValueType() types.ValueType_Enum { - if x != nil { - return x.ValueType - } - return types.ValueType_INVALID -} - -func (x *FeatureSpec) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -func (m *FeatureSpec) GetPresenceConstraints() isFeatureSpec_PresenceConstraints { - if m != nil { - return m.PresenceConstraints - } - return nil -} - -func (x *FeatureSpec) GetPresence() *v0.FeaturePresence { - if x, ok := x.GetPresenceConstraints().(*FeatureSpec_Presence); ok { - return x.Presence - } - return nil -} - -func (x *FeatureSpec) GetGroupPresence() *v0.FeaturePresenceWithinGroup { - if x, ok := x.GetPresenceConstraints().(*FeatureSpec_GroupPresence); ok { - return x.GroupPresence - } - return nil -} - -func (m *FeatureSpec) GetShapeType() isFeatureSpec_ShapeType { - if m != nil { - return m.ShapeType - } - return nil -} - -func (x *FeatureSpec) GetShape() *v0.FixedShape { - if x, ok := x.GetShapeType().(*FeatureSpec_Shape); ok { - return x.Shape - } - return nil -} - -func (x *FeatureSpec) GetValueCount() *v0.ValueCount { - if x, ok := x.GetShapeType().(*FeatureSpec_ValueCount); ok { - return x.ValueCount - } - return nil -} - -func (m *FeatureSpec) GetDomainInfo() isFeatureSpec_DomainInfo { - if m != nil { - return m.DomainInfo - } - return nil -} - -func (x *FeatureSpec) GetDomain() string { - if x, ok := x.GetDomainInfo().(*FeatureSpec_Domain); ok { - return x.Domain - } - return "" -} - -func (x *FeatureSpec) GetIntDomain() *v0.IntDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_IntDomain); ok { - return x.IntDomain - } - return nil -} - -func (x *FeatureSpec) GetFloatDomain() *v0.FloatDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_FloatDomain); ok { - return x.FloatDomain - } - return nil -} - -func (x *FeatureSpec) GetStringDomain() *v0.StringDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_StringDomain); ok { - return x.StringDomain - } - return nil -} - -func (x *FeatureSpec) GetBoolDomain() *v0.BoolDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_BoolDomain); ok { - return x.BoolDomain - } - return nil -} - -func (x *FeatureSpec) GetStructDomain() *v0.StructDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_StructDomain); ok { - return x.StructDomain - } - return nil -} - -func (x *FeatureSpec) GetNaturalLanguageDomain() *v0.NaturalLanguageDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_NaturalLanguageDomain); ok { - return x.NaturalLanguageDomain - } - return nil -} - -func (x *FeatureSpec) GetImageDomain() *v0.ImageDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_ImageDomain); ok { - return x.ImageDomain - } - return nil -} - -func (x *FeatureSpec) GetMidDomain() *v0.MIDDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_MidDomain); ok { - return x.MidDomain - } - return nil -} - -func (x *FeatureSpec) GetUrlDomain() *v0.URLDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_UrlDomain); ok { - return x.UrlDomain - } - return nil -} - -func (x *FeatureSpec) GetTimeDomain() *v0.TimeDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_TimeDomain); ok { - return x.TimeDomain - } - return nil -} - -func (x *FeatureSpec) GetTimeOfDayDomain() *v0.TimeOfDayDomain { - if x, ok := x.GetDomainInfo().(*FeatureSpec_TimeOfDayDomain); ok { - return x.TimeOfDayDomain - 
} - return nil -} - -type isFeatureSpec_PresenceConstraints interface { - isFeatureSpec_PresenceConstraints() -} - -type FeatureSpec_Presence struct { - // Constraints on the presence of this feature in the examples. - Presence *v0.FeaturePresence `protobuf:"bytes,30,opt,name=presence,proto3,oneof"` -} - -type FeatureSpec_GroupPresence struct { - // Only used in the context of a "group" context, e.g., inside a sequence. - GroupPresence *v0.FeaturePresenceWithinGroup `protobuf:"bytes,31,opt,name=group_presence,json=groupPresence,proto3,oneof"` -} - -func (*FeatureSpec_Presence) isFeatureSpec_PresenceConstraints() {} - -func (*FeatureSpec_GroupPresence) isFeatureSpec_PresenceConstraints() {} - -type isFeatureSpec_ShapeType interface { - isFeatureSpec_ShapeType() -} - -type FeatureSpec_Shape struct { - // The feature has a fixed shape corresponding to a multi-dimensional - // tensor. - Shape *v0.FixedShape `protobuf:"bytes,32,opt,name=shape,proto3,oneof"` -} - -type FeatureSpec_ValueCount struct { - // The feature doesn't have a well defined shape. All we know are limits on - // the minimum and maximum number of values. - ValueCount *v0.ValueCount `protobuf:"bytes,33,opt,name=value_count,json=valueCount,proto3,oneof"` -} - -func (*FeatureSpec_Shape) isFeatureSpec_ShapeType() {} - -func (*FeatureSpec_ValueCount) isFeatureSpec_ShapeType() {} - -type isFeatureSpec_DomainInfo interface { - isFeatureSpec_DomainInfo() -} - -type FeatureSpec_Domain struct { - // Reference to a domain defined at the schema level. - Domain string `protobuf:"bytes,34,opt,name=domain,proto3,oneof"` -} - -type FeatureSpec_IntDomain struct { - // Inline definitions of domains. - IntDomain *v0.IntDomain `protobuf:"bytes,35,opt,name=int_domain,json=intDomain,proto3,oneof"` -} - -type FeatureSpec_FloatDomain struct { - FloatDomain *v0.FloatDomain `protobuf:"bytes,36,opt,name=float_domain,json=floatDomain,proto3,oneof"` -} - -type FeatureSpec_StringDomain struct { - StringDomain *v0.StringDomain `protobuf:"bytes,37,opt,name=string_domain,json=stringDomain,proto3,oneof"` -} - -type FeatureSpec_BoolDomain struct { - BoolDomain *v0.BoolDomain `protobuf:"bytes,38,opt,name=bool_domain,json=boolDomain,proto3,oneof"` -} - -type FeatureSpec_StructDomain struct { - StructDomain *v0.StructDomain `protobuf:"bytes,39,opt,name=struct_domain,json=structDomain,proto3,oneof"` -} - -type FeatureSpec_NaturalLanguageDomain struct { - // Supported semantic domains. 
- NaturalLanguageDomain *v0.NaturalLanguageDomain `protobuf:"bytes,40,opt,name=natural_language_domain,json=naturalLanguageDomain,proto3,oneof"` -} - -type FeatureSpec_ImageDomain struct { - ImageDomain *v0.ImageDomain `protobuf:"bytes,41,opt,name=image_domain,json=imageDomain,proto3,oneof"` -} - -type FeatureSpec_MidDomain struct { - MidDomain *v0.MIDDomain `protobuf:"bytes,42,opt,name=mid_domain,json=midDomain,proto3,oneof"` -} - -type FeatureSpec_UrlDomain struct { - UrlDomain *v0.URLDomain `protobuf:"bytes,43,opt,name=url_domain,json=urlDomain,proto3,oneof"` -} - -type FeatureSpec_TimeDomain struct { - TimeDomain *v0.TimeDomain `protobuf:"bytes,44,opt,name=time_domain,json=timeDomain,proto3,oneof"` -} - -type FeatureSpec_TimeOfDayDomain struct { - TimeOfDayDomain *v0.TimeOfDayDomain `protobuf:"bytes,45,opt,name=time_of_day_domain,json=timeOfDayDomain,proto3,oneof"` -} - -func (*FeatureSpec_Domain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_IntDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_FloatDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_StringDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_BoolDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_StructDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_NaturalLanguageDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_ImageDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_MidDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_UrlDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_TimeDomain) isFeatureSpec_DomainInfo() {} - -func (*FeatureSpec_TimeOfDayDomain) isFeatureSpec_DomainInfo() {} - -type FeatureSetMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Created timestamp of this specific feature set. - CreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=created_timestamp,json=createdTimestamp,proto3" json:"created_timestamp,omitempty"` - // Status of the feature set. - // Used to indicate whether the feature set is ready for consumption or ingestion. - // Currently supports 2 states: - // 1) STATUS_PENDING - A feature set is in pending state if Feast has not spun up the jobs - // necessary to push rows for this feature set to stores subscribing to this feature set. - // 2) STATUS_READY - Feature set is ready for consumption or ingestion - Status FeatureSetStatus `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.FeatureSetStatus" json:"status,omitempty"` -} - -func (x *FeatureSetMeta) Reset() { - *x = FeatureSetMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSet_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSetMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSetMeta) ProtoMessage() {} - -func (x *FeatureSetMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSet_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSetMeta.ProtoReflect.Descriptor instead. 
-func (*FeatureSetMeta) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSet_proto_rawDescGZIP(), []int{4} -} - -func (x *FeatureSetMeta) GetCreatedTimestamp() *timestamp.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -func (x *FeatureSetMeta) GetStatus() FeatureSetStatus { - if x != nil { - return x.Status - } - return FeatureSetStatus_STATUS_INVALID -} - -var File_feast_core_FeatureSet_proto protoreflect.FileDescriptor - -var file_feast_core_FeatureSet_proto_rawDesc = []byte{ - 0x0a, 0x1b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x29, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x6c, 0x0a, 0x0a, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x2e, 0x0a, 0x04, 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x52, - 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x2e, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52, - 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0xa2, 0x03, 0x0a, 0x0e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, - 0x65, 0x63, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, - 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, - 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x66, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, - 0x32, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 
0x28, 0x0b, - 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x6d, 0x61, 0x78, - 0x41, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, - 0x3e, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, - 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, - 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, 0x5c, 0x0a, 0x0a, 0x45, 0x6e, - 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0xa0, 0x0b, 0x0a, 0x0b, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x45, 0x0a, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, - 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, - 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x5b, 0x0a, 0x0e, - 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x1f, - 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x57, 0x69, 0x74, - 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x00, 0x52, 0x0d, 0x67, 0x72, 0x6f, 0x75, - 0x70, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, 0x0a, 0x05, 0x73, 0x68, 0x61, - 0x70, 0x65, 0x18, 0x20, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x48, 0x01, 0x52, 0x05, - 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x21, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x48, 0x01, - 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x06, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x22, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x06, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, 0x64, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x23, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, - 0x09, 0x69, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, 0x0c, 0x66, 0x6c, - 0x6f, 0x61, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x24, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x64, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x25, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x26, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x42, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, 0x62, 0x6f, - 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x75, - 0x63, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x27, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 
0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x67, 0x0a, 0x17, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, - 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x28, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x4e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x15, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, - 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, - 0x0a, 0x0c, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x29, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6d, - 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x6d, 0x69, 0x64, 0x5f, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x2a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4d, 0x49, 0x44, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, - 0x02, 0x52, 0x09, 0x6d, 0x69, 0x64, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, - 0x75, 0x72, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x2b, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x55, 0x52, 0x4c, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x75, 0x72, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x12, 0x45, 0x0a, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, - 0x2c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, 0x74, 0x69, 0x6d, - 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x12, 0x74, 0x69, 0x6d, 0x65, 0x5f, - 0x6f, 0x66, 0x5f, 0x64, 0x61, 0x79, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x2d, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0f, - 0x74, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x1a, - 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x70, 0x72, - 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, - 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x68, 0x61, 0x70, 
0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, - 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x4a, - 0x04, 0x08, 0x03, 0x10, 0x10, 0x4a, 0x04, 0x08, 0x11, 0x10, 0x1e, 0x22, 0x8f, 0x01, 0x0a, 0x0e, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x47, - 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2a, 0x65, 0x0a, - 0x10, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x4e, 0x56, 0x41, - 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, - 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, - 0x54, 0x55, 0x53, 0x5f, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, - 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, 0x45, 0x41, - 0x44, 0x59, 0x10, 0x02, 0x2a, 0x4b, 0x0a, 0x1b, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x44, 0x65, 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x4e, - 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x53, - 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x44, 0x45, 0x4c, 0x49, 0x56, 0x45, 0x52, 0x45, 0x44, 0x10, - 0x01, 0x42, 0x58, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, - 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, - 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_FeatureSet_proto_rawDescOnce sync.Once - file_feast_core_FeatureSet_proto_rawDescData = file_feast_core_FeatureSet_proto_rawDesc -) - -func file_feast_core_FeatureSet_proto_rawDescGZIP() []byte { - file_feast_core_FeatureSet_proto_rawDescOnce.Do(func() { - file_feast_core_FeatureSet_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_FeatureSet_proto_rawDescData) - }) - return file_feast_core_FeatureSet_proto_rawDescData -} - -var file_feast_core_FeatureSet_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_feast_core_FeatureSet_proto_msgTypes = make([]protoimpl.MessageInfo, 7) -var file_feast_core_FeatureSet_proto_goTypes = []interface{}{ - (FeatureSetStatus)(0), // 0: feast.core.FeatureSetStatus - 
(FeatureSetJobDeliveryStatus)(0), // 1: feast.core.FeatureSetJobDeliveryStatus - (*FeatureSet)(nil), // 2: feast.core.FeatureSet - (*FeatureSetSpec)(nil), // 3: feast.core.FeatureSetSpec - (*EntitySpec)(nil), // 4: feast.core.EntitySpec - (*FeatureSpec)(nil), // 5: feast.core.FeatureSpec - (*FeatureSetMeta)(nil), // 6: feast.core.FeatureSetMeta - nil, // 7: feast.core.FeatureSetSpec.LabelsEntry - nil, // 8: feast.core.FeatureSpec.LabelsEntry - (*duration.Duration)(nil), // 9: google.protobuf.Duration - (*Source)(nil), // 10: feast.core.Source - (types.ValueType_Enum)(0), // 11: feast.types.ValueType.Enum - (*v0.FeaturePresence)(nil), // 12: tensorflow.metadata.v0.FeaturePresence - (*v0.FeaturePresenceWithinGroup)(nil), // 13: tensorflow.metadata.v0.FeaturePresenceWithinGroup - (*v0.FixedShape)(nil), // 14: tensorflow.metadata.v0.FixedShape - (*v0.ValueCount)(nil), // 15: tensorflow.metadata.v0.ValueCount - (*v0.IntDomain)(nil), // 16: tensorflow.metadata.v0.IntDomain - (*v0.FloatDomain)(nil), // 17: tensorflow.metadata.v0.FloatDomain - (*v0.StringDomain)(nil), // 18: tensorflow.metadata.v0.StringDomain - (*v0.BoolDomain)(nil), // 19: tensorflow.metadata.v0.BoolDomain - (*v0.StructDomain)(nil), // 20: tensorflow.metadata.v0.StructDomain - (*v0.NaturalLanguageDomain)(nil), // 21: tensorflow.metadata.v0.NaturalLanguageDomain - (*v0.ImageDomain)(nil), // 22: tensorflow.metadata.v0.ImageDomain - (*v0.MIDDomain)(nil), // 23: tensorflow.metadata.v0.MIDDomain - (*v0.URLDomain)(nil), // 24: tensorflow.metadata.v0.URLDomain - (*v0.TimeDomain)(nil), // 25: tensorflow.metadata.v0.TimeDomain - (*v0.TimeOfDayDomain)(nil), // 26: tensorflow.metadata.v0.TimeOfDayDomain - (*timestamp.Timestamp)(nil), // 27: google.protobuf.Timestamp -} -var file_feast_core_FeatureSet_proto_depIdxs = []int32{ - 3, // 0: feast.core.FeatureSet.spec:type_name -> feast.core.FeatureSetSpec - 6, // 1: feast.core.FeatureSet.meta:type_name -> feast.core.FeatureSetMeta - 4, // 2: feast.core.FeatureSetSpec.entities:type_name -> feast.core.EntitySpec - 5, // 3: feast.core.FeatureSetSpec.features:type_name -> feast.core.FeatureSpec - 9, // 4: feast.core.FeatureSetSpec.max_age:type_name -> google.protobuf.Duration - 10, // 5: feast.core.FeatureSetSpec.source:type_name -> feast.core.Source - 7, // 6: feast.core.FeatureSetSpec.labels:type_name -> feast.core.FeatureSetSpec.LabelsEntry - 11, // 7: feast.core.EntitySpec.value_type:type_name -> feast.types.ValueType.Enum - 11, // 8: feast.core.FeatureSpec.value_type:type_name -> feast.types.ValueType.Enum - 8, // 9: feast.core.FeatureSpec.labels:type_name -> feast.core.FeatureSpec.LabelsEntry - 12, // 10: feast.core.FeatureSpec.presence:type_name -> tensorflow.metadata.v0.FeaturePresence - 13, // 11: feast.core.FeatureSpec.group_presence:type_name -> tensorflow.metadata.v0.FeaturePresenceWithinGroup - 14, // 12: feast.core.FeatureSpec.shape:type_name -> tensorflow.metadata.v0.FixedShape - 15, // 13: feast.core.FeatureSpec.value_count:type_name -> tensorflow.metadata.v0.ValueCount - 16, // 14: feast.core.FeatureSpec.int_domain:type_name -> tensorflow.metadata.v0.IntDomain - 17, // 15: feast.core.FeatureSpec.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain - 18, // 16: feast.core.FeatureSpec.string_domain:type_name -> tensorflow.metadata.v0.StringDomain - 19, // 17: feast.core.FeatureSpec.bool_domain:type_name -> tensorflow.metadata.v0.BoolDomain - 20, // 18: feast.core.FeatureSpec.struct_domain:type_name -> tensorflow.metadata.v0.StructDomain - 21, // 19: 
feast.core.FeatureSpec.natural_language_domain:type_name -> tensorflow.metadata.v0.NaturalLanguageDomain - 22, // 20: feast.core.FeatureSpec.image_domain:type_name -> tensorflow.metadata.v0.ImageDomain - 23, // 21: feast.core.FeatureSpec.mid_domain:type_name -> tensorflow.metadata.v0.MIDDomain - 24, // 22: feast.core.FeatureSpec.url_domain:type_name -> tensorflow.metadata.v0.URLDomain - 25, // 23: feast.core.FeatureSpec.time_domain:type_name -> tensorflow.metadata.v0.TimeDomain - 26, // 24: feast.core.FeatureSpec.time_of_day_domain:type_name -> tensorflow.metadata.v0.TimeOfDayDomain - 27, // 25: feast.core.FeatureSetMeta.created_timestamp:type_name -> google.protobuf.Timestamp - 0, // 26: feast.core.FeatureSetMeta.status:type_name -> feast.core.FeatureSetStatus - 27, // [27:27] is the sub-list for method output_type - 27, // [27:27] is the sub-list for method input_type - 27, // [27:27] is the sub-list for extension type_name - 27, // [27:27] is the sub-list for extension extendee - 0, // [0:27] is the sub-list for field type_name -} - -func init() { file_feast_core_FeatureSet_proto_init() } -func file_feast_core_FeatureSet_proto_init() { - if File_feast_core_FeatureSet_proto != nil { - return - } - file_feast_core_Source_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_core_FeatureSet_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSet); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureSet_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSetSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureSet_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EntitySpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureSet_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureSet_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSetMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_FeatureSet_proto_msgTypes[3].OneofWrappers = []interface{}{ - (*FeatureSpec_Presence)(nil), - (*FeatureSpec_GroupPresence)(nil), - (*FeatureSpec_Shape)(nil), - (*FeatureSpec_ValueCount)(nil), - (*FeatureSpec_Domain)(nil), - (*FeatureSpec_IntDomain)(nil), - (*FeatureSpec_FloatDomain)(nil), - (*FeatureSpec_StringDomain)(nil), - (*FeatureSpec_BoolDomain)(nil), - (*FeatureSpec_StructDomain)(nil), - (*FeatureSpec_NaturalLanguageDomain)(nil), - (*FeatureSpec_ImageDomain)(nil), - (*FeatureSpec_MidDomain)(nil), - (*FeatureSpec_UrlDomain)(nil), - (*FeatureSpec_TimeDomain)(nil), - (*FeatureSpec_TimeOfDayDomain)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_FeatureSet_proto_rawDesc, - NumEnums: 2, - NumMessages: 7, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: 
file_feast_core_FeatureSet_proto_goTypes, - DependencyIndexes: file_feast_core_FeatureSet_proto_depIdxs, - EnumInfos: file_feast_core_FeatureSet_proto_enumTypes, - MessageInfos: file_feast_core_FeatureSet_proto_msgTypes, - }.Build() - File_feast_core_FeatureSet_proto = out.File - file_feast_core_FeatureSet_proto_rawDesc = nil - file_feast_core_FeatureSet_proto_goTypes = nil - file_feast_core_FeatureSet_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/FeatureSetReference.pb.go b/sdk/go/protos/feast/core/FeatureSetReference.pb.go deleted file mode 100644 index ca640aa0fb..0000000000 --- a/sdk/go/protos/feast/core/FeatureSetReference.pb.go +++ /dev/null @@ -1,183 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/core/FeatureSetReference.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// Defines a composite key that refers to a unique FeatureSet -type FeatureSetReference struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the project - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // Name of the FeatureSet - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *FeatureSetReference) Reset() { - *x = FeatureSetReference{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureSetReference_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSetReference) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSetReference) ProtoMessage() {} - -func (x *FeatureSetReference) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureSetReference_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSetReference.ProtoReflect.Descriptor instead. 
-func (*FeatureSetReference) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureSetReference_proto_rawDescGZIP(), []int{0} -} - -func (x *FeatureSetReference) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *FeatureSetReference) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -var File_feast_core_FeatureSetReference_proto protoreflect.FileDescriptor - -var file_feast_core_FeatureSetReference_proto_rawDesc = []byte{ - 0x0a, 0x24, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x22, 0x49, 0x0a, 0x13, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, - 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, - 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x42, 0x61, 0x0a, - 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x42, 0x18, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, - 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, - 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_FeatureSetReference_proto_rawDescOnce sync.Once - file_feast_core_FeatureSetReference_proto_rawDescData = file_feast_core_FeatureSetReference_proto_rawDesc -) - -func file_feast_core_FeatureSetReference_proto_rawDescGZIP() []byte { - file_feast_core_FeatureSetReference_proto_rawDescOnce.Do(func() { - file_feast_core_FeatureSetReference_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_FeatureSetReference_proto_rawDescData) - }) - return file_feast_core_FeatureSetReference_proto_rawDescData -} - -var file_feast_core_FeatureSetReference_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_feast_core_FeatureSetReference_proto_goTypes = []interface{}{ - (*FeatureSetReference)(nil), // 0: feast.core.FeatureSetReference -} -var file_feast_core_FeatureSetReference_proto_depIdxs = []int32{ - 0, // [0:0] is the sub-list for method output_type - 0, // [0:0] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_feast_core_FeatureSetReference_proto_init() } -func file_feast_core_FeatureSetReference_proto_init() { - if File_feast_core_FeatureSetReference_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_FeatureSetReference_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSetReference); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - 
out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_FeatureSetReference_proto_rawDesc, - NumEnums: 0, - NumMessages: 1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_FeatureSetReference_proto_goTypes, - DependencyIndexes: file_feast_core_FeatureSetReference_proto_depIdxs, - MessageInfos: file_feast_core_FeatureSetReference_proto_msgTypes, - }.Build() - File_feast_core_FeatureSetReference_proto = out.File - file_feast_core_FeatureSetReference_proto_rawDesc = nil - file_feast_core_FeatureSetReference_proto_goTypes = nil - file_feast_core_FeatureSetReference_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/FeatureTable.pb.go b/sdk/go/protos/feast/core/FeatureTable.pb.go deleted file mode 100644 index 0c4fa1b1f0..0000000000 --- a/sdk/go/protos/feast/core/FeatureTable.pb.go +++ /dev/null @@ -1,461 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/FeatureTable.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - duration "github.com/golang/protobuf/ptypes/duration" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type FeatureTable struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // User-specified specifications of this feature table. - Spec *FeatureTableSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` - // System-populated metadata for this feature table. 
- Meta *FeatureTableMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"` -} - -func (x *FeatureTable) Reset() { - *x = FeatureTable{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureTable_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureTable) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureTable) ProtoMessage() {} - -func (x *FeatureTable) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureTable_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureTable.ProtoReflect.Descriptor instead. -func (*FeatureTable) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureTable_proto_rawDescGZIP(), []int{0} -} - -func (x *FeatureTable) GetSpec() *FeatureTableSpec { - if x != nil { - return x.Spec - } - return nil -} - -func (x *FeatureTable) GetMeta() *FeatureTableMeta { - if x != nil { - return x.Meta - } - return nil -} - -type FeatureTableSpec struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the feature table. Must be unique. Not updated. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // List names of entities to associate with the Features defined in this - // Feature Table. Not updatable. - Entities []string `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` - // List of features specifications for each feature defined with this feature table. - Features []*FeatureSpecV2 `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` - // User defined metadata - Labels map[string]string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Features in this feature table can only be retrieved from online serving - // younger than max age. Age is measured as the duration of time between - // the feature's event timestamp and when the feature is retrieved - // Feature values outside max age will be returned as unset values and indicated to end user - MaxAge *duration.Duration `protobuf:"bytes,6,opt,name=max_age,json=maxAge,proto3" json:"max_age,omitempty"` - // Batch/Offline DataSource to source batch/offline feature data. - // Only batch DataSource can be specified - // (ie source type should start with 'BATCH_') - BatchSource *DataSource `protobuf:"bytes,7,opt,name=batch_source,json=batchSource,proto3" json:"batch_source,omitempty"` - // Stream/Online DataSource to source stream/online feature data. 
- // Only stream DataSource can be specified - // (ie source type should start with 'STREAM_') - StreamSource *DataSource `protobuf:"bytes,8,opt,name=stream_source,json=streamSource,proto3" json:"stream_source,omitempty"` -} - -func (x *FeatureTableSpec) Reset() { - *x = FeatureTableSpec{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureTable_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureTableSpec) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureTableSpec) ProtoMessage() {} - -func (x *FeatureTableSpec) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureTable_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureTableSpec.ProtoReflect.Descriptor instead. -func (*FeatureTableSpec) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureTable_proto_rawDescGZIP(), []int{1} -} - -func (x *FeatureTableSpec) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *FeatureTableSpec) GetEntities() []string { - if x != nil { - return x.Entities - } - return nil -} - -func (x *FeatureTableSpec) GetFeatures() []*FeatureSpecV2 { - if x != nil { - return x.Features - } - return nil -} - -func (x *FeatureTableSpec) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *FeatureTableSpec) GetMaxAge() *duration.Duration { - if x != nil { - return x.MaxAge - } - return nil -} - -func (x *FeatureTableSpec) GetBatchSource() *DataSource { - if x != nil { - return x.BatchSource - } - return nil -} - -func (x *FeatureTableSpec) GetStreamSource() *DataSource { - if x != nil { - return x.StreamSource - } - return nil -} - -type FeatureTableMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Time where this Feature Table is created - CreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=created_timestamp,json=createdTimestamp,proto3" json:"created_timestamp,omitempty"` - // Time where this Feature Table is last updated - LastUpdatedTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_updated_timestamp,json=lastUpdatedTimestamp,proto3" json:"last_updated_timestamp,omitempty"` - // Auto incrementing revision no. 
of this Feature Table - Revision int64 `protobuf:"varint,3,opt,name=revision,proto3" json:"revision,omitempty"` - // Hash entities, features, batch_source and stream_source to inform JobService if - // jobs should be restarted should hash change - Hash string `protobuf:"bytes,4,opt,name=hash,proto3" json:"hash,omitempty"` -} - -func (x *FeatureTableMeta) Reset() { - *x = FeatureTableMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_FeatureTable_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureTableMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureTableMeta) ProtoMessage() {} - -func (x *FeatureTableMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_FeatureTable_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureTableMeta.ProtoReflect.Descriptor instead. -func (*FeatureTableMeta) Descriptor() ([]byte, []int) { - return file_feast_core_FeatureTable_proto_rawDescGZIP(), []int{2} -} - -func (x *FeatureTableMeta) GetCreatedTimestamp() *timestamp.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -func (x *FeatureTableMeta) GetLastUpdatedTimestamp() *timestamp.Timestamp { - if x != nil { - return x.LastUpdatedTimestamp - } - return nil -} - -func (x *FeatureTableMeta) GetRevision() int64 { - if x != nil { - return x.Revision - } - return 0 -} - -func (x *FeatureTableMeta) GetHash() string { - if x != nil { - return x.Hash - } - return "" -} - -var File_feast_core_FeatureTable_proto protoreflect.FileDescriptor - -var file_feast_core_FeatureTable_proto_rawDesc = []byte{ - 0x0a, 0x1d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x18, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0x72, 0x0a, 0x0c, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, - 0x62, 0x6c, 0x65, 0x12, 0x30, 0x0a, 0x04, 0x73, 0x70, 0x65, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, - 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x30, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4d, 0x65, 0x74, - 0x61, 0x52, 0x04, 0x6d, 
0x65, 0x74, 0x61, 0x22, 0xa2, 0x03, 0x0a, 0x10, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, - 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x56, 0x32, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x05, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, - 0x63, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x32, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x67, 0x65, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x41, 0x67, 0x65, 0x12, 0x39, 0x0a, 0x0c, 0x62, 0x61, 0x74, - 0x63, 0x68, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, - 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0b, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdd, 0x01, 0x0a, - 0x10, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x4d, 0x65, 0x74, - 0x61, 0x12, 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x50, 0x0a, 0x16, 0x6c, 0x61, - 0x73, 0x74, 0x5f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x14, 
0x6c, 0x61, 0x73, 0x74, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1a, 0x0a, 0x08, - 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, - 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x42, 0x5a, 0x0a, 0x10, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x42, 0x11, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x50, 0x72, - 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_FeatureTable_proto_rawDescOnce sync.Once - file_feast_core_FeatureTable_proto_rawDescData = file_feast_core_FeatureTable_proto_rawDesc -) - -func file_feast_core_FeatureTable_proto_rawDescGZIP() []byte { - file_feast_core_FeatureTable_proto_rawDescOnce.Do(func() { - file_feast_core_FeatureTable_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_FeatureTable_proto_rawDescData) - }) - return file_feast_core_FeatureTable_proto_rawDescData -} - -var file_feast_core_FeatureTable_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_feast_core_FeatureTable_proto_goTypes = []interface{}{ - (*FeatureTable)(nil), // 0: feast.core.FeatureTable - (*FeatureTableSpec)(nil), // 1: feast.core.FeatureTableSpec - (*FeatureTableMeta)(nil), // 2: feast.core.FeatureTableMeta - nil, // 3: feast.core.FeatureTableSpec.LabelsEntry - (*FeatureSpecV2)(nil), // 4: feast.core.FeatureSpecV2 - (*duration.Duration)(nil), // 5: google.protobuf.Duration - (*DataSource)(nil), // 6: feast.core.DataSource - (*timestamp.Timestamp)(nil), // 7: google.protobuf.Timestamp -} -var file_feast_core_FeatureTable_proto_depIdxs = []int32{ - 1, // 0: feast.core.FeatureTable.spec:type_name -> feast.core.FeatureTableSpec - 2, // 1: feast.core.FeatureTable.meta:type_name -> feast.core.FeatureTableMeta - 4, // 2: feast.core.FeatureTableSpec.features:type_name -> feast.core.FeatureSpecV2 - 3, // 3: feast.core.FeatureTableSpec.labels:type_name -> feast.core.FeatureTableSpec.LabelsEntry - 5, // 4: feast.core.FeatureTableSpec.max_age:type_name -> google.protobuf.Duration - 6, // 5: feast.core.FeatureTableSpec.batch_source:type_name -> feast.core.DataSource - 6, // 6: feast.core.FeatureTableSpec.stream_source:type_name -> feast.core.DataSource - 7, // 7: feast.core.FeatureTableMeta.created_timestamp:type_name -> google.protobuf.Timestamp - 7, // 8: feast.core.FeatureTableMeta.last_updated_timestamp:type_name -> google.protobuf.Timestamp - 9, // [9:9] is the sub-list for method output_type - 9, // [9:9] is the sub-list for method input_type - 9, // [9:9] is the sub-list for extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name -} - -func init() { file_feast_core_FeatureTable_proto_init() } -func file_feast_core_FeatureTable_proto_init() { - if File_feast_core_FeatureTable_proto != nil { - return - } - file_feast_core_DataSource_proto_init() - file_feast_core_Feature_proto_init() - if !protoimpl.UnsafeEnabled { - 
file_feast_core_FeatureTable_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureTable); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureTable_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureTableSpec); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_FeatureTable_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureTableMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_FeatureTable_proto_rawDesc, - NumEnums: 0, - NumMessages: 4, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_FeatureTable_proto_goTypes, - DependencyIndexes: file_feast_core_FeatureTable_proto_depIdxs, - MessageInfos: file_feast_core_FeatureTable_proto_msgTypes, - }.Build() - File_feast_core_FeatureTable_proto = out.File - file_feast_core_FeatureTable_proto_rawDesc = nil - file_feast_core_FeatureTable_proto_goTypes = nil - file_feast_core_FeatureTable_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/IngestionJob.pb.go b/sdk/go/protos/feast/core/IngestionJob.pb.go deleted file mode 100644 index a716bdf089..0000000000 --- a/sdk/go/protos/feast/core/IngestionJob.pb.go +++ /dev/null @@ -1,506 +0,0 @@ -// -// Copyright 2020 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/core/IngestionJob.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// Status of a Feast Ingestion Job -type IngestionJobStatus int32 - -const ( - // Job status is not known. 
- IngestionJobStatus_UNKNOWN IngestionJobStatus = 0
- // Import job has been submitted to the runner and is pending execution
- IngestionJobStatus_PENDING IngestionJobStatus = 1
- // Import job is currently running in the runner
- IngestionJobStatus_RUNNING IngestionJobStatus = 2
- // Runner has reported that the import job completed (applicable to batch jobs)
- IngestionJobStatus_COMPLETED IngestionJobStatus = 3
- // User has sent an abort command, but the job is still running
- IngestionJobStatus_ABORTING IngestionJobStatus = 4
- // Job has been aborted at the user's request
- IngestionJobStatus_ABORTED IngestionJobStatus = 5
- // Runner has reported that the import job failed to run or failed while running
- IngestionJobStatus_ERROR IngestionJobStatus = 6
- // Job has been suspended and is waiting for cleanup
- IngestionJobStatus_SUSPENDING IngestionJobStatus = 7
- // Job has been suspended
- IngestionJobStatus_SUSPENDED IngestionJobStatus = 8
-)
-
-// Enum value maps for IngestionJobStatus.
-var (
- IngestionJobStatus_name = map[int32]string{
- 0: "UNKNOWN",
- 1: "PENDING",
- 2: "RUNNING",
- 3: "COMPLETED",
- 4: "ABORTING",
- 5: "ABORTED",
- 6: "ERROR",
- 7: "SUSPENDING",
- 8: "SUSPENDED",
- }
- IngestionJobStatus_value = map[string]int32{
- "UNKNOWN": 0,
- "PENDING": 1,
- "RUNNING": 2,
- "COMPLETED": 3,
- "ABORTING": 4,
- "ABORTED": 5,
- "ERROR": 6,
- "SUSPENDING": 7,
- "SUSPENDED": 8,
- }
-)
-
-func (x IngestionJobStatus) Enum() *IngestionJobStatus {
- p := new(IngestionJobStatus)
- *p = x
- return p
-}
-
-func (x IngestionJobStatus) String() string {
- return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
-}
-
-func (IngestionJobStatus) Descriptor() protoreflect.EnumDescriptor {
- return file_feast_core_IngestionJob_proto_enumTypes[0].Descriptor()
-}
-
-func (IngestionJobStatus) Type() protoreflect.EnumType {
- return &file_feast_core_IngestionJob_proto_enumTypes[0]
-}
-
-func (x IngestionJobStatus) Number() protoreflect.EnumNumber {
- return protoreflect.EnumNumber(x)
-}
-
-// Deprecated: Use IngestionJobStatus.Descriptor instead.
-func (IngestionJobStatus) EnumDescriptor() ([]byte, []int) {
- return file_feast_core_IngestionJob_proto_rawDescGZIP(), []int{0}
-}
-
-// Represents a Feast Ingestion Job
-type IngestionJob struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- // Job ID assigned by Feast
- Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
- // External job ID specific to the runner.
- // For DirectRunner jobs, this is identical to id. For DataflowRunner jobs, this refers to the Dataflow job ID.
- ExternalId string `protobuf:"bytes,2,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"`
- Status IngestionJobStatus `protobuf:"varint,3,opt,name=status,proto3,enum=feast.core.IngestionJobStatus" json:"status,omitempty"`
- // Source this job is reading from.
- Source *Source `protobuf:"bytes,5,opt,name=source,proto3" json:"source,omitempty"`
- // Stores this job is writing to.
- Stores []*Store `protobuf:"bytes,6,rep,name=stores,proto3" json:"stores,omitempty"`
- // List of Feature Set References
- FeatureSetReferences []*FeatureSetReference `protobuf:"bytes,7,rep,name=feature_set_references,json=featureSetReferences,proto3" json:"feature_set_references,omitempty"`
-}
-
-func (x *IngestionJob) Reset() {
- *x = IngestionJob{}
- if protoimpl.UnsafeEnabled {
- mi := &file_feast_core_IngestionJob_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
-}
-
-func (x *IngestionJob) String() string {
- return protoimpl.X.MessageStringOf(x)
-}
-
-func (*IngestionJob) ProtoMessage() {}
-
-func (x *IngestionJob) ProtoReflect() protoreflect.Message {
- mi := &file_feast_core_IngestionJob_proto_msgTypes[0]
- if protoimpl.UnsafeEnabled && x != nil {
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- if ms.LoadMessageInfo() == nil {
- ms.StoreMessageInfo(mi)
- }
- return ms
- }
- return mi.MessageOf(x)
-}
-
-// Deprecated: Use IngestionJob.ProtoReflect.Descriptor instead.
-func (*IngestionJob) Descriptor() ([]byte, []int) {
- return file_feast_core_IngestionJob_proto_rawDescGZIP(), []int{0}
-}
-
-func (x *IngestionJob) GetId() string {
- if x != nil {
- return x.Id
- }
- return ""
-}
-
-func (x *IngestionJob) GetExternalId() string {
- if x != nil {
- return x.ExternalId
- }
- return ""
-}
-
-func (x *IngestionJob) GetStatus() IngestionJobStatus {
- if x != nil {
- return x.Status
- }
- return IngestionJobStatus_UNKNOWN
-}
-
-func (x *IngestionJob) GetSource() *Source {
- if x != nil {
- return x.Source
- }
- return nil
-}
-
-func (x *IngestionJob) GetStores() []*Store {
- if x != nil {
- return x.Stores
- }
- return nil
-}
-
-func (x *IngestionJob) GetFeatureSetReferences() []*FeatureSetReference {
- if x != nil {
- return x.FeatureSetReferences
- }
- return nil
-}
-
-// Config for the bi-directional communication channel between the Core Service and an Ingestion Job
-type SpecsStreamingUpdateConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- // Out-channel used by Core to publish new FeatureSetSpecs.
- // The IngestionJob uses it as the source of existing FeatureSetSpecs and of new real-time updates
- Source *KafkaSourceConfig `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"`
- // Ack-channel for sending acknowledgments when a new FeatureSetSpec is installed in the Job
- Ack *KafkaSourceConfig `protobuf:"bytes,2,opt,name=ack,proto3" json:"ack,omitempty"`
-}
-
-func (x *SpecsStreamingUpdateConfig) Reset() {
- *x = SpecsStreamingUpdateConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_feast_core_IngestionJob_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
-}
-
-func (x *SpecsStreamingUpdateConfig) String() string {
- return protoimpl.X.MessageStringOf(x)
-}
-
-func (*SpecsStreamingUpdateConfig) ProtoMessage() {}
-
-func (x *SpecsStreamingUpdateConfig) ProtoReflect() protoreflect.Message {
- mi := &file_feast_core_IngestionJob_proto_msgTypes[1]
- if protoimpl.UnsafeEnabled && x != nil {
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- if ms.LoadMessageInfo() == nil {
- ms.StoreMessageInfo(mi)
- }
- return ms
- }
- return mi.MessageOf(x)
-}
-
-// Deprecated: Use SpecsStreamingUpdateConfig.ProtoReflect.Descriptor instead.
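For reviewers tracing what this deletion drops: SpecsStreamingUpdateConfig paired an out-channel (specs published by Core) with an ack-channel (installation acknowledgments from the Ingestion Job). Below is a minimal sketch, assuming the pre-removal generated types; the broker address and both topic names are purely illustrative.

```go
package main

import (
	"fmt"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

func main() {
	// Both channels point at the same (hypothetical) cluster; "feast-specs"
	// carries new FeatureSetSpecs out of Core, while "feast-specs-ack"
	// carries installation acknowledgments back from the Ingestion Job.
	cfg := &core.SpecsStreamingUpdateConfig{
		Source: &core.KafkaSourceConfig{BootstrapServers: "kafka:9092", Topic: "feast-specs"},
		Ack:    &core.KafkaSourceConfig{BootstrapServers: "kafka:9092", Topic: "feast-specs-ack"},
	}
	fmt.Println(cfg.GetSource().GetTopic(), cfg.GetAck().GetTopic())
}
```

As the field comments suggest, keeping two separate topics let spec delivery and acknowledgment flow independently in each direction.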
-func (*SpecsStreamingUpdateConfig) Descriptor() ([]byte, []int) { - return file_feast_core_IngestionJob_proto_rawDescGZIP(), []int{1} -} - -func (x *SpecsStreamingUpdateConfig) GetSource() *KafkaSourceConfig { - if x != nil { - return x.Source - } - return nil -} - -func (x *SpecsStreamingUpdateConfig) GetAck() *KafkaSourceConfig { - if x != nil { - return x.Ack - } - return nil -} - -type FeatureSetSpecAck struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - FeatureSetReference string `protobuf:"bytes,1,opt,name=feature_set_reference,json=featureSetReference,proto3" json:"feature_set_reference,omitempty"` - FeatureSetVersion int32 `protobuf:"varint,2,opt,name=feature_set_version,json=featureSetVersion,proto3" json:"feature_set_version,omitempty"` - JobName string `protobuf:"bytes,3,opt,name=job_name,json=jobName,proto3" json:"job_name,omitempty"` -} - -func (x *FeatureSetSpecAck) Reset() { - *x = FeatureSetSpecAck{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_IngestionJob_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureSetSpecAck) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureSetSpecAck) ProtoMessage() {} - -func (x *FeatureSetSpecAck) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_IngestionJob_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureSetSpecAck.ProtoReflect.Descriptor instead. -func (*FeatureSetSpecAck) Descriptor() ([]byte, []int) { - return file_feast_core_IngestionJob_proto_rawDescGZIP(), []int{2} -} - -func (x *FeatureSetSpecAck) GetFeatureSetReference() string { - if x != nil { - return x.FeatureSetReference - } - return "" -} - -func (x *FeatureSetSpecAck) GetFeatureSetVersion() int32 { - if x != nil { - return x.FeatureSetVersion - } - return 0 -} - -func (x *FeatureSetSpecAck) GetJobName() string { - if x != nil { - return x.JobName - } - return "" -} - -var File_feast_core_IngestionJob_proto protoreflect.FileDescriptor - -var file_feast_core_IngestionJob_proto_rawDesc = []byte{ - 0x0a, 0x1d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x49, 0x6e, 0x67, - 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x24, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x1a, 0x16, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x74, - 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0xab, 0x02, 0x0a, 0x0c, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, - 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, - 0x61, 0x6c, 0x49, 0x64, 0x12, 0x36, 
0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2a, 0x0a, 0x06, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x29, 0x0a, 0x06, 0x73, 0x74, 0x6f, 0x72, - 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x06, 0x73, 0x74, 0x6f, - 0x72, 0x65, 0x73, 0x12, 0x55, 0x0a, 0x16, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, - 0x65, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, - 0x65, 0x6e, 0x63, 0x65, 0x52, 0x14, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, - 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, - 0x22, 0x84, 0x01, 0x0a, 0x1a, 0x53, 0x70, 0x65, 0x63, 0x73, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, - 0x69, 0x6e, 0x67, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x35, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x61, 0x66, - 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x03, 0x61, 0x63, 0x6b, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x52, 0x03, 0x61, 0x63, 0x6b, 0x22, 0x92, 0x01, 0x0a, 0x11, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x41, 0x63, 0x6b, 0x12, 0x32, 0x0a, - 0x15, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x72, 0x65, 0x66, - 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x66, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, - 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, - 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, - 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x6a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x2a, 0x8f, 0x01, 0x0a, - 0x12, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, - 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0b, 0x0a, - 0x07, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02, 
0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, - 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x42, 0x4f, - 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x04, 0x12, 0x0b, 0x0a, 0x07, 0x41, 0x42, 0x4f, 0x52, 0x54, - 0x45, 0x44, 0x10, 0x05, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x06, 0x12, - 0x0e, 0x0a, 0x0a, 0x53, 0x55, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, - 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x45, 0x44, 0x10, 0x08, 0x42, 0x5a, - 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x42, 0x11, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, - 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, -} - -var ( - file_feast_core_IngestionJob_proto_rawDescOnce sync.Once - file_feast_core_IngestionJob_proto_rawDescData = file_feast_core_IngestionJob_proto_rawDesc -) - -func file_feast_core_IngestionJob_proto_rawDescGZIP() []byte { - file_feast_core_IngestionJob_proto_rawDescOnce.Do(func() { - file_feast_core_IngestionJob_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_IngestionJob_proto_rawDescData) - }) - return file_feast_core_IngestionJob_proto_rawDescData -} - -var file_feast_core_IngestionJob_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_IngestionJob_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_feast_core_IngestionJob_proto_goTypes = []interface{}{ - (IngestionJobStatus)(0), // 0: feast.core.IngestionJobStatus - (*IngestionJob)(nil), // 1: feast.core.IngestionJob - (*SpecsStreamingUpdateConfig)(nil), // 2: feast.core.SpecsStreamingUpdateConfig - (*FeatureSetSpecAck)(nil), // 3: feast.core.FeatureSetSpecAck - (*Source)(nil), // 4: feast.core.Source - (*Store)(nil), // 5: feast.core.Store - (*FeatureSetReference)(nil), // 6: feast.core.FeatureSetReference - (*KafkaSourceConfig)(nil), // 7: feast.core.KafkaSourceConfig -} -var file_feast_core_IngestionJob_proto_depIdxs = []int32{ - 0, // 0: feast.core.IngestionJob.status:type_name -> feast.core.IngestionJobStatus - 4, // 1: feast.core.IngestionJob.source:type_name -> feast.core.Source - 5, // 2: feast.core.IngestionJob.stores:type_name -> feast.core.Store - 6, // 3: feast.core.IngestionJob.feature_set_references:type_name -> feast.core.FeatureSetReference - 7, // 4: feast.core.SpecsStreamingUpdateConfig.source:type_name -> feast.core.KafkaSourceConfig - 7, // 5: feast.core.SpecsStreamingUpdateConfig.ack:type_name -> feast.core.KafkaSourceConfig - 6, // [6:6] is the sub-list for method output_type - 6, // [6:6] is the sub-list for method input_type - 6, // [6:6] is the sub-list for extension type_name - 6, // [6:6] is the sub-list for extension extendee - 0, // [0:6] is the sub-list for field type_name -} - -func init() { file_feast_core_IngestionJob_proto_init() } -func file_feast_core_IngestionJob_proto_init() { - if File_feast_core_IngestionJob_proto != nil { - return - } - file_feast_core_FeatureSetReference_proto_init() - file_feast_core_Store_proto_init() - file_feast_core_Source_proto_init() - if !protoimpl.UnsafeEnabled { - 
file_feast_core_IngestionJob_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IngestionJob); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_IngestionJob_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SpecsStreamingUpdateConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_IngestionJob_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureSetSpecAck); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_IngestionJob_proto_rawDesc, - NumEnums: 1, - NumMessages: 3, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_IngestionJob_proto_goTypes, - DependencyIndexes: file_feast_core_IngestionJob_proto_depIdxs, - EnumInfos: file_feast_core_IngestionJob_proto_enumTypes, - MessageInfos: file_feast_core_IngestionJob_proto_msgTypes, - }.Build() - File_feast_core_IngestionJob_proto = out.File - file_feast_core_IngestionJob_proto_rawDesc = nil - file_feast_core_IngestionJob_proto_goTypes = nil - file_feast_core_IngestionJob_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/JobService.pb.go b/sdk/go/protos/feast/core/JobService.pb.go deleted file mode 100644 index 221c530bb3..0000000000 --- a/sdk/go/protos/feast/core/JobService.pb.go +++ /dev/null @@ -1,1789 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/JobService.proto - -package core - -import ( - context "context" - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. 
-const _ = proto.ProtoPackageIsVersion4
-
-type JobType int32
-
-const (
- JobType_INVALID_JOB JobType = 0
- JobType_BATCH_INGESTION_JOB JobType = 1
- JobType_STREAM_INGESTION_JOB JobType = 2
- JobType_RETRIEVAL_JOB JobType = 4
-)
-
-// Enum value maps for JobType.
-var (
- JobType_name = map[int32]string{
- 0: "INVALID_JOB",
- 1: "BATCH_INGESTION_JOB",
- 2: "STREAM_INGESTION_JOB",
- 4: "RETRIEVAL_JOB",
- }
- JobType_value = map[string]int32{
- "INVALID_JOB": 0,
- "BATCH_INGESTION_JOB": 1,
- "STREAM_INGESTION_JOB": 2,
- "RETRIEVAL_JOB": 4,
- }
-)
-
-func (x JobType) Enum() *JobType {
- p := new(JobType)
- *p = x
- return p
-}
-
-func (x JobType) String() string {
- return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
-}
-
-func (JobType) Descriptor() protoreflect.EnumDescriptor {
- return file_feast_core_JobService_proto_enumTypes[0].Descriptor()
-}
-
-func (JobType) Type() protoreflect.EnumType {
- return &file_feast_core_JobService_proto_enumTypes[0]
-}
-
-func (x JobType) Number() protoreflect.EnumNumber {
- return protoreflect.EnumNumber(x)
-}
-
-// Deprecated: Use JobType.Descriptor instead.
-func (JobType) EnumDescriptor() ([]byte, []int) {
- return file_feast_core_JobService_proto_rawDescGZIP(), []int{0}
-}
-
-type JobStatus int32
-
-const (
- JobStatus_JOB_STATUS_INVALID JobStatus = 0
- // The Job has been registered and is waiting to be scheduled to run
- JobStatus_JOB_STATUS_PENDING JobStatus = 1
- // The Job is currently processing its task
- JobStatus_JOB_STATUS_RUNNING JobStatus = 2
- // The Job has successfully completed its task
- JobStatus_JOB_STATUS_DONE JobStatus = 3
- // The Job has encountered an error while processing its task
- JobStatus_JOB_STATUS_ERROR JobStatus = 4
-)
-
-// Enum value maps for JobStatus.
-var (
- JobStatus_name = map[int32]string{
- 0: "JOB_STATUS_INVALID",
- 1: "JOB_STATUS_PENDING",
- 2: "JOB_STATUS_RUNNING",
- 3: "JOB_STATUS_DONE",
- 4: "JOB_STATUS_ERROR",
- }
- JobStatus_value = map[string]int32{
- "JOB_STATUS_INVALID": 0,
- "JOB_STATUS_PENDING": 1,
- "JOB_STATUS_RUNNING": 2,
- "JOB_STATUS_DONE": 3,
- "JOB_STATUS_ERROR": 4,
- }
-)
-
-func (x JobStatus) Enum() *JobStatus {
- p := new(JobStatus)
- *p = x
- return p
-}
-
-func (x JobStatus) String() string {
- return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
-}
-
-func (JobStatus) Descriptor() protoreflect.EnumDescriptor {
- return file_feast_core_JobService_proto_enumTypes[1].Descriptor()
-}
-
-func (JobStatus) Type() protoreflect.EnumType {
- return &file_feast_core_JobService_proto_enumTypes[1]
-}
-
-func (x JobStatus) Number() protoreflect.EnumNumber {
- return protoreflect.EnumNumber(x)
-}
-
-// Deprecated: Use JobStatus.Descriptor instead.
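The JobStatus lifecycle above is small enough to capture in a helper. A sketch under the assumption that callers treat DONE and ERROR as terminal; the `isTerminal` helper itself is hypothetical, not part of the generated file.

```go
package main

import (
	"fmt"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// isTerminal reports whether a job has stopped making progress: DONE and
// ERROR are terminal, PENDING and RUNNING are still in flight, and INVALID
// means the status was never set.
func isTerminal(s core.JobStatus) bool {
	switch s {
	case core.JobStatus_JOB_STATUS_DONE, core.JobStatus_JOB_STATUS_ERROR:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isTerminal(core.JobStatus_JOB_STATUS_RUNNING)) // false
}
```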
-func (JobStatus) EnumDescriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{1} -} - -type Job struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Identifier of the Job - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - // Type of the Job - Type JobType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.core.JobType" json:"type,omitempty"` - // Current job status - Status JobStatus `protobuf:"varint,3,opt,name=status,proto3,enum=feast.core.JobStatus" json:"status,omitempty"` - // Deterministic hash of the Job - Hash string `protobuf:"bytes,8,opt,name=hash,proto3" json:"hash,omitempty"` - // JobType specific metadata on the job - // - // Types that are assignable to Meta: - // *Job_Retrieval - // *Job_BatchIngestion - // *Job_StreamIngestion - Meta isJob_Meta `protobuf_oneof:"meta"` -} - -func (x *Job) Reset() { - *x = Job{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Job) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Job) ProtoMessage() {} - -func (x *Job) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Job.ProtoReflect.Descriptor instead. -func (*Job) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{0} -} - -func (x *Job) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *Job) GetType() JobType { - if x != nil { - return x.Type - } - return JobType_INVALID_JOB -} - -func (x *Job) GetStatus() JobStatus { - if x != nil { - return x.Status - } - return JobStatus_JOB_STATUS_INVALID -} - -func (x *Job) GetHash() string { - if x != nil { - return x.Hash - } - return "" -} - -func (m *Job) GetMeta() isJob_Meta { - if m != nil { - return m.Meta - } - return nil -} - -func (x *Job) GetRetrieval() *Job_RetrievalJobMeta { - if x, ok := x.GetMeta().(*Job_Retrieval); ok { - return x.Retrieval - } - return nil -} - -func (x *Job) GetBatchIngestion() *Job_OfflineToOnlineMeta { - if x, ok := x.GetMeta().(*Job_BatchIngestion); ok { - return x.BatchIngestion - } - return nil -} - -func (x *Job) GetStreamIngestion() *Job_StreamToOnlineMeta { - if x, ok := x.GetMeta().(*Job_StreamIngestion); ok { - return x.StreamIngestion - } - return nil -} - -type isJob_Meta interface { - isJob_Meta() -} - -type Job_Retrieval struct { - Retrieval *Job_RetrievalJobMeta `protobuf:"bytes,5,opt,name=retrieval,proto3,oneof"` -} - -type Job_BatchIngestion struct { - BatchIngestion *Job_OfflineToOnlineMeta `protobuf:"bytes,6,opt,name=batch_ingestion,json=batchIngestion,proto3,oneof"` -} - -type Job_StreamIngestion struct { - StreamIngestion *Job_StreamToOnlineMeta `protobuf:"bytes,7,opt,name=stream_ingestion,json=streamIngestion,proto3,oneof"` -} - -func (*Job_Retrieval) isJob_Meta() {} - -func (*Job_BatchIngestion) isJob_Meta() {} - -func (*Job_StreamIngestion) isJob_Meta() {} - -// Ingest data from offline store into online store -type StartOfflineToOnlineIngestionJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // 
Feature table to ingest - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - TableName string `protobuf:"bytes,2,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` - // Start of time range for source data from offline store - StartDate *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` - // End of time range for source data from offline store - EndDate *timestamp.Timestamp `protobuf:"bytes,4,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` -} - -func (x *StartOfflineToOnlineIngestionJobRequest) Reset() { - *x = StartOfflineToOnlineIngestionJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StartOfflineToOnlineIngestionJobRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StartOfflineToOnlineIngestionJobRequest) ProtoMessage() {} - -func (x *StartOfflineToOnlineIngestionJobRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StartOfflineToOnlineIngestionJobRequest.ProtoReflect.Descriptor instead. -func (*StartOfflineToOnlineIngestionJobRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{1} -} - -func (x *StartOfflineToOnlineIngestionJobRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *StartOfflineToOnlineIngestionJobRequest) GetTableName() string { - if x != nil { - return x.TableName - } - return "" -} - -func (x *StartOfflineToOnlineIngestionJobRequest) GetStartDate() *timestamp.Timestamp { - if x != nil { - return x.StartDate - } - return nil -} - -func (x *StartOfflineToOnlineIngestionJobRequest) GetEndDate() *timestamp.Timestamp { - if x != nil { - return x.EndDate - } - return nil -} - -type StartOfflineToOnlineIngestionJobResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Job ID assigned by Feast - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` -} - -func (x *StartOfflineToOnlineIngestionJobResponse) Reset() { - *x = StartOfflineToOnlineIngestionJobResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StartOfflineToOnlineIngestionJobResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StartOfflineToOnlineIngestionJobResponse) ProtoMessage() {} - -func (x *StartOfflineToOnlineIngestionJobResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StartOfflineToOnlineIngestionJobResponse.ProtoReflect.Descriptor instead. 
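The request/response pair above drove offline-to-online backfills. A minimal sketch of building such a request, using the legacy ptypes helpers this file itself imported; the project and table names are placeholders.

```go
package example

import (
	"time"

	"github.com/golang/protobuf/ptypes"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// buildBackfillRequest assembles an offline-to-online ingestion request for
// one feature table over a bounded time range. "default" and
// "driver_statistics" are placeholder names.
func buildBackfillRequest(start, end time.Time) (*core.StartOfflineToOnlineIngestionJobRequest, error) {
	startTs, err := ptypes.TimestampProto(start)
	if err != nil {
		return nil, err
	}
	endTs, err := ptypes.TimestampProto(end)
	if err != nil {
		return nil, err
	}
	return &core.StartOfflineToOnlineIngestionJobRequest{
		Project:   "default",
		TableName: "driver_statistics",
		StartDate: startTs,
		EndDate:   endTs,
	}, nil
}
```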
-func (*StartOfflineToOnlineIngestionJobResponse) Descriptor() ([]byte, []int) {
- return file_feast_core_JobService_proto_rawDescGZIP(), []int{2}
-}
-
-func (x *StartOfflineToOnlineIngestionJobResponse) GetId() string {
- if x != nil {
- return x.Id
- }
- return ""
-}
-
-type GetHistoricalFeaturesRequest struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- // List of feature references that are being retrieved
- FeatureRefs []string `protobuf:"bytes,1,rep,name=feature_refs,json=featureRefs,proto3" json:"feature_refs,omitempty"`
- // Batch DataSource that can be used to obtain entity values for historical retrieval.
- // For each entity value, a feature value will be retrieved for that value/timestamp.
- // Only 'BATCH_*' source types are supported; currently only the BATCH_FILE source type is implemented.
- EntitySource *DataSource `protobuf:"bytes,2,opt,name=entity_source,json=entitySource,proto3" json:"entity_source,omitempty"`
- // Optional field to specify project name override. If specified, uses the
- // given project for retrieval. Overrides the projects specified in
- // Feature References if both are specified.
- Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"`
- // Specifies the path in a bucket to write the exported feature data files.
- // Export to AWS S3 - s3://path/to/features
- // Export to GCP GCS - gs://path/to/features
- OutputLocation string `protobuf:"bytes,4,opt,name=output_location,json=outputLocation,proto3" json:"output_location,omitempty"`
- // Specify format name for output, e.g. parquet
- OutputFormat string `protobuf:"bytes,5,opt,name=output_format,json=outputFormat,proto3" json:"output_format,omitempty"`
-}
-
-func (x *GetHistoricalFeaturesRequest) Reset() {
- *x = GetHistoricalFeaturesRequest{}
- if protoimpl.UnsafeEnabled {
- mi := &file_feast_core_JobService_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
-}
-
-func (x *GetHistoricalFeaturesRequest) String() string {
- return protoimpl.X.MessageStringOf(x)
-}
-
-func (*GetHistoricalFeaturesRequest) ProtoMessage() {}
-
-func (x *GetHistoricalFeaturesRequest) ProtoReflect() protoreflect.Message {
- mi := &file_feast_core_JobService_proto_msgTypes[3]
- if protoimpl.UnsafeEnabled && x != nil {
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- if ms.LoadMessageInfo() == nil {
- ms.StoreMessageInfo(mi)
- }
- return ms
- }
- return mi.MessageOf(x)
-}
-
-// Deprecated: Use GetHistoricalFeaturesRequest.ProtoReflect.Descriptor instead.
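A sketch of the request shape the field comments above describe; every literal value is a placeholder, and the entity source must carry a batch configuration (per the comment, only BATCH_FILE was supported at the time).

```go
package example

import (
	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// newHistoricalRequest assembles a historical-retrieval request: feature
// references to fetch, a batch entity source, and an output location plus
// format for the exported files. The feature reference, project, and
// bucket path are illustrative placeholders.
func newHistoricalRequest(entitySource *core.DataSource) *core.GetHistoricalFeaturesRequest {
	return &core.GetHistoricalFeaturesRequest{
		FeatureRefs:    []string{"driver_statistics:avg_rating"},
		EntitySource:   entitySource, // must be a BATCH_* (BATCH_FILE) source
		Project:        "default",
		OutputLocation: "s3://bucket/path/to/features",
		OutputFormat:   "parquet",
	}
}
```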
-func (*GetHistoricalFeaturesRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{3} -} - -func (x *GetHistoricalFeaturesRequest) GetFeatureRefs() []string { - if x != nil { - return x.FeatureRefs - } - return nil -} - -func (x *GetHistoricalFeaturesRequest) GetEntitySource() *DataSource { - if x != nil { - return x.EntitySource - } - return nil -} - -func (x *GetHistoricalFeaturesRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *GetHistoricalFeaturesRequest) GetOutputLocation() string { - if x != nil { - return x.OutputLocation - } - return "" -} - -func (x *GetHistoricalFeaturesRequest) GetOutputFormat() string { - if x != nil { - return x.OutputFormat - } - return "" -} - -type GetHistoricalFeaturesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Export Job with ID assigned by Feast - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - OutputFileUri string `protobuf:"bytes,2,opt,name=output_file_uri,json=outputFileUri,proto3" json:"output_file_uri,omitempty"` -} - -func (x *GetHistoricalFeaturesResponse) Reset() { - *x = GetHistoricalFeaturesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetHistoricalFeaturesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetHistoricalFeaturesResponse) ProtoMessage() {} - -func (x *GetHistoricalFeaturesResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetHistoricalFeaturesResponse.ProtoReflect.Descriptor instead. 
-func (*GetHistoricalFeaturesResponse) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{4} -} - -func (x *GetHistoricalFeaturesResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GetHistoricalFeaturesResponse) GetOutputFileUri() string { - if x != nil { - return x.OutputFileUri - } - return "" -} - -type StartStreamToOnlineIngestionJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Feature table to ingest - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - TableName string `protobuf:"bytes,2,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` -} - -func (x *StartStreamToOnlineIngestionJobRequest) Reset() { - *x = StartStreamToOnlineIngestionJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StartStreamToOnlineIngestionJobRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StartStreamToOnlineIngestionJobRequest) ProtoMessage() {} - -func (x *StartStreamToOnlineIngestionJobRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StartStreamToOnlineIngestionJobRequest.ProtoReflect.Descriptor instead. -func (*StartStreamToOnlineIngestionJobRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{5} -} - -func (x *StartStreamToOnlineIngestionJobRequest) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *StartStreamToOnlineIngestionJobRequest) GetTableName() string { - if x != nil { - return x.TableName - } - return "" -} - -type StartStreamToOnlineIngestionJobResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Job ID assigned by Feast - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` -} - -func (x *StartStreamToOnlineIngestionJobResponse) Reset() { - *x = StartStreamToOnlineIngestionJobResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StartStreamToOnlineIngestionJobResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StartStreamToOnlineIngestionJobResponse) ProtoMessage() {} - -func (x *StartStreamToOnlineIngestionJobResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StartStreamToOnlineIngestionJobResponse.ProtoReflect.Descriptor instead. 
-func (*StartStreamToOnlineIngestionJobResponse) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{6} -} - -func (x *StartStreamToOnlineIngestionJobResponse) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -type ListJobsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - IncludeTerminated bool `protobuf:"varint,1,opt,name=include_terminated,json=includeTerminated,proto3" json:"include_terminated,omitempty"` -} - -func (x *ListJobsRequest) Reset() { - *x = ListJobsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListJobsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListJobsRequest) ProtoMessage() {} - -func (x *ListJobsRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListJobsRequest.ProtoReflect.Descriptor instead. -func (*ListJobsRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{7} -} - -func (x *ListJobsRequest) GetIncludeTerminated() bool { - if x != nil { - return x.IncludeTerminated - } - return false -} - -type ListJobsResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` -} - -func (x *ListJobsResponse) Reset() { - *x = ListJobsResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ListJobsResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ListJobsResponse) ProtoMessage() {} - -func (x *ListJobsResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ListJobsResponse.ProtoReflect.Descriptor instead. 
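ListJobsRequest/ListJobsResponse were consumed through the JobService gRPC stub generated later in this same file (the imports of grpc, context, codes, and status at the top of the deletion are for those stubs). A sketch assuming the standard protoc-gen-go stub name, NewJobServiceClient:

```go
package example

import (
	"context"

	"google.golang.org/grpc"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// listActiveJobs returns jobs that have not yet terminated. The stub name
// follows standard protoc-gen-go conventions and is assumed here, since the
// service code falls outside the excerpt shown above.
func listActiveJobs(ctx context.Context, conn *grpc.ClientConn) ([]*core.Job, error) {
	client := core.NewJobServiceClient(conn)
	resp, err := client.ListJobs(ctx, &core.ListJobsRequest{IncludeTerminated: false})
	if err != nil {
		return nil, err
	}
	return resp.GetJobs(), nil
}
```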
-func (*ListJobsResponse) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{8} -} - -func (x *ListJobsResponse) GetJobs() []*Job { - if x != nil { - return x.Jobs - } - return nil -} - -type GetJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` -} - -func (x *GetJobRequest) Reset() { - *x = GetJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetJobRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetJobRequest) ProtoMessage() {} - -func (x *GetJobRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetJobRequest.ProtoReflect.Descriptor instead. -func (*GetJobRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{9} -} - -func (x *GetJobRequest) GetJobId() string { - if x != nil { - return x.JobId - } - return "" -} - -type GetJobResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` -} - -func (x *GetJobResponse) Reset() { - *x = GetJobResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetJobResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetJobResponse) ProtoMessage() {} - -func (x *GetJobResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetJobResponse.ProtoReflect.Descriptor instead. 
-func (*GetJobResponse) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{10} -} - -func (x *GetJobResponse) GetJob() *Job { - if x != nil { - return x.Job - } - return nil -} - -type CancelJobRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` -} - -func (x *CancelJobRequest) Reset() { - *x = CancelJobRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CancelJobRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CancelJobRequest) ProtoMessage() {} - -func (x *CancelJobRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CancelJobRequest.ProtoReflect.Descriptor instead. -func (*CancelJobRequest) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{11} -} - -func (x *CancelJobRequest) GetJobId() string { - if x != nil { - return x.JobId - } - return "" -} - -type CancelJobResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *CancelJobResponse) Reset() { - *x = CancelJobResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CancelJobResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CancelJobResponse) ProtoMessage() {} - -func (x *CancelJobResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CancelJobResponse.ProtoReflect.Descriptor instead. 
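CancelJobResponse carries no payload, so a caller wanting confirmation could poll GetJob until the job reached a terminal status. A hypothetical sketch of that pattern, again assuming the generated JobServiceClient stub; the two-second poll interval is an arbitrary choice for illustration.

```go
package example

import (
	"context"
	"time"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

// cancelAndAwait requests cancellation of a job, then polls GetJob until
// the job reports a terminal status (DONE or ERROR) or the context ends.
func cancelAndAwait(ctx context.Context, client core.JobServiceClient, jobID string) error {
	if _, err := client.CancelJob(ctx, &core.CancelJobRequest{JobId: jobID}); err != nil {
		return err
	}
	for {
		resp, err := client.GetJob(ctx, &core.GetJobRequest{JobId: jobID})
		if err != nil {
			return err
		}
		switch resp.GetJob().GetStatus() {
		case core.JobStatus_JOB_STATUS_DONE, core.JobStatus_JOB_STATUS_ERROR:
			return nil
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(2 * time.Second):
		}
	}
}
```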
-func (*CancelJobResponse) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{12} -} - -type Job_RetrievalJobMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - OutputLocation string `protobuf:"bytes,4,opt,name=output_location,json=outputLocation,proto3" json:"output_location,omitempty"` -} - -func (x *Job_RetrievalJobMeta) Reset() { - *x = Job_RetrievalJobMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Job_RetrievalJobMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Job_RetrievalJobMeta) ProtoMessage() {} - -func (x *Job_RetrievalJobMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Job_RetrievalJobMeta.ProtoReflect.Descriptor instead. -func (*Job_RetrievalJobMeta) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{0, 0} -} - -func (x *Job_RetrievalJobMeta) GetOutputLocation() string { - if x != nil { - return x.OutputLocation - } - return "" -} - -type Job_OfflineToOnlineMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *Job_OfflineToOnlineMeta) Reset() { - *x = Job_OfflineToOnlineMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Job_OfflineToOnlineMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Job_OfflineToOnlineMeta) ProtoMessage() {} - -func (x *Job_OfflineToOnlineMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Job_OfflineToOnlineMeta.ProtoReflect.Descriptor instead. -func (*Job_OfflineToOnlineMeta) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{0, 1} -} - -type Job_StreamToOnlineMeta struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *Job_StreamToOnlineMeta) Reset() { - *x = Job_StreamToOnlineMeta{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_JobService_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Job_StreamToOnlineMeta) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Job_StreamToOnlineMeta) ProtoMessage() {} - -func (x *Job_StreamToOnlineMeta) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_JobService_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Job_StreamToOnlineMeta.ProtoReflect.Descriptor instead. 
-func (*Job_StreamToOnlineMeta) Descriptor() ([]byte, []int) { - return file_feast_core_JobService_proto_rawDescGZIP(), []int{0, 2} -} - -var File_feast_core_JobService_proto protoreflect.FileDescriptor - -var file_feast_core_JobService_proto_rawDesc = []byte{ - 0x0a, 0x1b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x4a, 0x6f, 0x62, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd6, 0x03, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x27, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x54, 0x79, - 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x2d, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, - 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x40, 0x0a, 0x09, 0x72, - 0x65, 0x74, 0x72, 0x69, 0x65, 0x76, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x2e, - 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x76, 0x61, 0x6c, 0x4a, 0x6f, 0x62, 0x4d, 0x65, 0x74, 0x61, - 0x48, 0x00, 0x52, 0x09, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x76, 0x61, 0x6c, 0x12, 0x4e, 0x0a, - 0x0f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x2e, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x54, - 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x48, 0x00, 0x52, 0x0e, 0x62, - 0x61, 0x74, 0x63, 0x68, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4f, 0x0a, - 0x10, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, - 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x48, 0x00, 0x52, 0x0f, 0x73, - 0x74, 0x72, 0x65, 0x61, 0x6d, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x3b, - 0x0a, 0x10, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x76, 0x61, 0x6c, 0x4a, 0x6f, 0x62, 0x4d, 0x65, - 0x74, 0x61, 0x12, 0x27, 0x0a, 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6c, 0x6f, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x15, 0x0a, 0x13, 0x4f, - 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x6f, 0x4f, 
0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x1a, 0x14, 0x0a, 0x12, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, - 0x6c, 0x69, 0x6e, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x42, 0x06, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, - 0x22, 0xd4, 0x01, 0x0a, 0x27, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, - 0x65, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, - 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x61, 0x62, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x64, - 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x44, 0x61, 0x74, 0x65, - 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, - 0x65, 0x6e, 0x64, 0x44, 0x61, 0x74, 0x65, 0x22, 0x3a, 0x0a, 0x28, 0x53, 0x74, 0x61, 0x72, 0x74, - 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, - 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x22, 0xe6, 0x01, 0x0a, 0x1c, 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, - 0x72, 0x65, 0x66, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x73, 0x12, 0x3b, 0x0a, 0x0d, 0x65, 0x6e, 0x74, 0x69, 0x74, - 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, - 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0c, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x27, - 0x0a, 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4c, - 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x22, 0x57, 0x0a, 0x1d, - 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 
0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, - 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x69, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x46, 0x69, - 0x6c, 0x65, 0x55, 0x72, 0x69, 0x22, 0x61, 0x0a, 0x26, 0x53, 0x74, 0x61, 0x72, 0x74, 0x53, 0x74, - 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, - 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x61, 0x62, - 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, - 0x61, 0x62, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x39, 0x0a, 0x27, 0x53, 0x74, 0x61, 0x72, - 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, - 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x22, 0x40, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x4a, 0x6f, 0x62, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, - 0x65, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x54, 0x65, 0x72, 0x6d, 0x69, - 0x6e, 0x61, 0x74, 0x65, 0x64, 0x22, 0x37, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4a, 0x6f, 0x62, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x04, 0x6a, 0x6f, 0x62, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x22, 0x26, - 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x22, 0x33, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x29, 0x0a, 0x10, 0x43, - 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x22, 0x13, 0x0a, 0x11, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, - 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x60, 0x0a, 0x07, 0x4a, - 0x6f, 0x62, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, - 0x44, 0x5f, 0x4a, 0x4f, 0x42, 0x10, 0x00, 0x12, 0x17, 0x0a, 0x13, 0x42, 0x41, 0x54, 0x43, 0x48, - 0x5f, 0x49, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x4a, 0x4f, 0x42, 0x10, 0x01, - 0x12, 0x18, 0x0a, 0x14, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x5f, 0x49, 0x4e, 0x47, 0x45, 0x53, - 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x4a, 0x4f, 0x42, 0x10, 0x02, 0x12, 0x11, 0x0a, 0x0d, 0x52, 0x45, - 0x54, 
0x52, 0x49, 0x45, 0x56, 0x41, 0x4c, 0x5f, 0x4a, 0x4f, 0x42, 0x10, 0x04, 0x2a, 0x7e, 0x0a, - 0x09, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, - 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, - 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, - 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, - 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x44, 0x4f, 0x4e, 0x45, 0x10, 0x03, 0x12, 0x14, 0x0a, 0x10, 0x4a, 0x4f, 0x42, 0x5f, 0x53, - 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x32, 0xe9, 0x04, - 0x0a, 0x0a, 0x4a, 0x6f, 0x62, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x8d, 0x01, 0x0a, - 0x20, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x6f, 0x4f, - 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, - 0x62, 0x12, 0x33, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, - 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x34, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, - 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, - 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c, 0x0a, 0x15, - 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x28, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, - 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x8a, 0x01, 0x0a, 0x1f, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, - 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x12, 0x32, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x72, - 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x49, - 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x33, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x54, 0x6f, 0x4f, 0x6e, 0x6c, - 0x69, 0x6e, 0x65, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x45, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x4a, - 0x6f, 0x62, 0x73, 0x12, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x4c, 0x69, 0x73, 0x74, 
0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48, - 0x0a, 0x09, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1c, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, - 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4a, - 0x6f, 0x62, 0x12, 0x19, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, - 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x58, 0x0a, 0x10, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0f, 0x4a, - 0x6f, 0x62, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, - 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, - 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_JobService_proto_rawDescOnce sync.Once - file_feast_core_JobService_proto_rawDescData = file_feast_core_JobService_proto_rawDesc -) - -func file_feast_core_JobService_proto_rawDescGZIP() []byte { - file_feast_core_JobService_proto_rawDescOnce.Do(func() { - file_feast_core_JobService_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_JobService_proto_rawDescData) - }) - return file_feast_core_JobService_proto_rawDescData -} - -var file_feast_core_JobService_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_feast_core_JobService_proto_msgTypes = make([]protoimpl.MessageInfo, 16) -var file_feast_core_JobService_proto_goTypes = []interface{}{ - (JobType)(0), // 0: feast.core.JobType - (JobStatus)(0), // 1: feast.core.JobStatus - (*Job)(nil), // 2: feast.core.Job - (*StartOfflineToOnlineIngestionJobRequest)(nil), // 3: feast.core.StartOfflineToOnlineIngestionJobRequest - (*StartOfflineToOnlineIngestionJobResponse)(nil), // 4: feast.core.StartOfflineToOnlineIngestionJobResponse - (*GetHistoricalFeaturesRequest)(nil), // 5: feast.core.GetHistoricalFeaturesRequest - (*GetHistoricalFeaturesResponse)(nil), // 6: feast.core.GetHistoricalFeaturesResponse - (*StartStreamToOnlineIngestionJobRequest)(nil), // 7: feast.core.StartStreamToOnlineIngestionJobRequest - (*StartStreamToOnlineIngestionJobResponse)(nil), // 8: feast.core.StartStreamToOnlineIngestionJobResponse - (*ListJobsRequest)(nil), // 9: feast.core.ListJobsRequest - (*ListJobsResponse)(nil), // 10: feast.core.ListJobsResponse - (*GetJobRequest)(nil), // 11: feast.core.GetJobRequest - (*GetJobResponse)(nil), // 12: feast.core.GetJobResponse - (*CancelJobRequest)(nil), // 13: feast.core.CancelJobRequest - (*CancelJobResponse)(nil), // 14: feast.core.CancelJobResponse - (*Job_RetrievalJobMeta)(nil), // 15: 
feast.core.Job.RetrievalJobMeta - (*Job_OfflineToOnlineMeta)(nil), // 16: feast.core.Job.OfflineToOnlineMeta - (*Job_StreamToOnlineMeta)(nil), // 17: feast.core.Job.StreamToOnlineMeta - (*timestamp.Timestamp)(nil), // 18: google.protobuf.Timestamp - (*DataSource)(nil), // 19: feast.core.DataSource -} -var file_feast_core_JobService_proto_depIdxs = []int32{ - 0, // 0: feast.core.Job.type:type_name -> feast.core.JobType - 1, // 1: feast.core.Job.status:type_name -> feast.core.JobStatus - 15, // 2: feast.core.Job.retrieval:type_name -> feast.core.Job.RetrievalJobMeta - 16, // 3: feast.core.Job.batch_ingestion:type_name -> feast.core.Job.OfflineToOnlineMeta - 17, // 4: feast.core.Job.stream_ingestion:type_name -> feast.core.Job.StreamToOnlineMeta - 18, // 5: feast.core.StartOfflineToOnlineIngestionJobRequest.start_date:type_name -> google.protobuf.Timestamp - 18, // 6: feast.core.StartOfflineToOnlineIngestionJobRequest.end_date:type_name -> google.protobuf.Timestamp - 19, // 7: feast.core.GetHistoricalFeaturesRequest.entity_source:type_name -> feast.core.DataSource - 2, // 8: feast.core.ListJobsResponse.jobs:type_name -> feast.core.Job - 2, // 9: feast.core.GetJobResponse.job:type_name -> feast.core.Job - 3, // 10: feast.core.JobService.StartOfflineToOnlineIngestionJob:input_type -> feast.core.StartOfflineToOnlineIngestionJobRequest - 5, // 11: feast.core.JobService.GetHistoricalFeatures:input_type -> feast.core.GetHistoricalFeaturesRequest - 7, // 12: feast.core.JobService.StartStreamToOnlineIngestionJob:input_type -> feast.core.StartStreamToOnlineIngestionJobRequest - 9, // 13: feast.core.JobService.ListJobs:input_type -> feast.core.ListJobsRequest - 13, // 14: feast.core.JobService.CancelJob:input_type -> feast.core.CancelJobRequest - 11, // 15: feast.core.JobService.GetJob:input_type -> feast.core.GetJobRequest - 4, // 16: feast.core.JobService.StartOfflineToOnlineIngestionJob:output_type -> feast.core.StartOfflineToOnlineIngestionJobResponse - 6, // 17: feast.core.JobService.GetHistoricalFeatures:output_type -> feast.core.GetHistoricalFeaturesResponse - 8, // 18: feast.core.JobService.StartStreamToOnlineIngestionJob:output_type -> feast.core.StartStreamToOnlineIngestionJobResponse - 10, // 19: feast.core.JobService.ListJobs:output_type -> feast.core.ListJobsResponse - 14, // 20: feast.core.JobService.CancelJob:output_type -> feast.core.CancelJobResponse - 12, // 21: feast.core.JobService.GetJob:output_type -> feast.core.GetJobResponse - 16, // [16:22] is the sub-list for method output_type - 10, // [10:16] is the sub-list for method input_type - 10, // [10:10] is the sub-list for extension type_name - 10, // [10:10] is the sub-list for extension extendee - 0, // [0:10] is the sub-list for field type_name -} - -func init() { file_feast_core_JobService_proto_init() } -func file_feast_core_JobService_proto_init() { - if File_feast_core_JobService_proto != nil { - return - } - file_feast_core_DataSource_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_core_JobService_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Job); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartOfflineToOnlineIngestionJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - 
file_feast_core_JobService_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartOfflineToOnlineIngestionJobResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHistoricalFeaturesRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHistoricalFeaturesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartStreamToOnlineIngestionJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartStreamToOnlineIngestionJobResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListJobsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListJobsResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetJobResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CancelJobRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CancelJobResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Job_RetrievalJobMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Job_OfflineToOnlineMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields 
- default: - return nil - } - } - file_feast_core_JobService_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Job_StreamToOnlineMeta); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_JobService_proto_msgTypes[0].OneofWrappers = []interface{}{ - (*Job_Retrieval)(nil), - (*Job_BatchIngestion)(nil), - (*Job_StreamIngestion)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_JobService_proto_rawDesc, - NumEnums: 2, - NumMessages: 16, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_feast_core_JobService_proto_goTypes, - DependencyIndexes: file_feast_core_JobService_proto_depIdxs, - EnumInfos: file_feast_core_JobService_proto_enumTypes, - MessageInfos: file_feast_core_JobService_proto_msgTypes, - }.Build() - File_feast_core_JobService_proto = out.File - file_feast_core_JobService_proto_rawDesc = nil - file_feast_core_JobService_proto_goTypes = nil - file_feast_core_JobService_proto_depIdxs = nil -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// JobServiceClient is the client API for JobService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type JobServiceClient interface { - // Start job to ingest data from offline store into online store - StartOfflineToOnlineIngestionJob(ctx context.Context, in *StartOfflineToOnlineIngestionJobRequest, opts ...grpc.CallOption) (*StartOfflineToOnlineIngestionJobResponse, error) - // Produce a training dataset, return a job id that will provide a file reference - GetHistoricalFeatures(ctx context.Context, in *GetHistoricalFeaturesRequest, opts ...grpc.CallOption) (*GetHistoricalFeaturesResponse, error) - // Start job to ingest data from stream into online store - StartStreamToOnlineIngestionJob(ctx context.Context, in *StartStreamToOnlineIngestionJobRequest, opts ...grpc.CallOption) (*StartStreamToOnlineIngestionJobResponse, error) - // List all types of jobs - ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) - // Cancel a single job - CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*CancelJobResponse, error) - // Get details of a single job - GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*GetJobResponse, error) -} - -type jobServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewJobServiceClient(cc grpc.ClientConnInterface) JobServiceClient { - return &jobServiceClient{cc} -} - -func (c *jobServiceClient) StartOfflineToOnlineIngestionJob(ctx context.Context, in *StartOfflineToOnlineIngestionJobRequest, opts ...grpc.CallOption) (*StartOfflineToOnlineIngestionJobResponse, error) { - out := new(StartOfflineToOnlineIngestionJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/StartOfflineToOnlineIngestionJob", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) GetHistoricalFeatures(ctx context.Context, in *GetHistoricalFeaturesRequest, opts ...grpc.CallOption) (*GetHistoricalFeaturesResponse, error) { - out := new(GetHistoricalFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/GetHistoricalFeatures", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) StartStreamToOnlineIngestionJob(ctx context.Context, in *StartStreamToOnlineIngestionJobRequest, opts ...grpc.CallOption) (*StartStreamToOnlineIngestionJobResponse, error) { - out := new(StartStreamToOnlineIngestionJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/StartStreamToOnlineIngestionJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { - out := new(ListJobsResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/ListJobs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*CancelJobResponse, error) { - out := new(CancelJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/CancelJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *jobServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*GetJobResponse, error) { - out := new(GetJobResponse) - err := c.cc.Invoke(ctx, "/feast.core.JobService/GetJob", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// JobServiceServer is the server API for JobService service. -type JobServiceServer interface { - // Start job to ingest data from offline store into online store - StartOfflineToOnlineIngestionJob(context.Context, *StartOfflineToOnlineIngestionJobRequest) (*StartOfflineToOnlineIngestionJobResponse, error) - // Produce a training dataset, return a job id that will provide a file reference - GetHistoricalFeatures(context.Context, *GetHistoricalFeaturesRequest) (*GetHistoricalFeaturesResponse, error) - // Start job to ingest data from stream into online store - StartStreamToOnlineIngestionJob(context.Context, *StartStreamToOnlineIngestionJobRequest) (*StartStreamToOnlineIngestionJobResponse, error) - // List all types of jobs - ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) - // Cancel a single job - CancelJob(context.Context, *CancelJobRequest) (*CancelJobResponse, error) - // Get details of a single job - GetJob(context.Context, *GetJobRequest) (*GetJobResponse, error) -} - -// UnimplementedJobServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedJobServiceServer struct { -} - -func (*UnimplementedJobServiceServer) StartOfflineToOnlineIngestionJob(context.Context, *StartOfflineToOnlineIngestionJobRequest) (*StartOfflineToOnlineIngestionJobResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method StartOfflineToOnlineIngestionJob not implemented") -} -func (*UnimplementedJobServiceServer) GetHistoricalFeatures(context.Context, *GetHistoricalFeaturesRequest) (*GetHistoricalFeaturesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetHistoricalFeatures not implemented") -} -func (*UnimplementedJobServiceServer) StartStreamToOnlineIngestionJob(context.Context, *StartStreamToOnlineIngestionJobRequest) (*StartStreamToOnlineIngestionJobResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method StartStreamToOnlineIngestionJob not implemented") -} -func (*UnimplementedJobServiceServer) ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListJobs not implemented") -} -func (*UnimplementedJobServiceServer) CancelJob(context.Context, *CancelJobRequest) (*CancelJobResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CancelJob not implemented") -} -func (*UnimplementedJobServiceServer) GetJob(context.Context, *GetJobRequest) (*GetJobResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetJob not implemented") -} - -func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { - s.RegisterService(&_JobService_serviceDesc, srv) -} - -func _JobService_StartOfflineToOnlineIngestionJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(StartOfflineToOnlineIngestionJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).StartOfflineToOnlineIngestionJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/StartOfflineToOnlineIngestionJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).StartOfflineToOnlineIngestionJob(ctx, req.(*StartOfflineToOnlineIngestionJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_GetHistoricalFeatures_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetHistoricalFeaturesRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).GetHistoricalFeatures(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/GetHistoricalFeatures", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).GetHistoricalFeatures(ctx, req.(*GetHistoricalFeaturesRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_StartStreamToOnlineIngestionJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(StartStreamToOnlineIngestionJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).StartStreamToOnlineIngestionJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - 
FullMethod: "/feast.core.JobService/StartStreamToOnlineIngestionJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).StartStreamToOnlineIngestionJob(ctx, req.(*StartStreamToOnlineIngestionJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ListJobsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).ListJobs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/ListJobs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).ListJobs(ctx, req.(*ListJobsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CancelJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).CancelJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/CancelJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).CancelJob(ctx, req.(*CancelJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetJobRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(JobServiceServer).GetJob(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.core.JobService/GetJob", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(JobServiceServer).GetJob(ctx, req.(*GetJobRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _JobService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.core.JobService", - HandlerType: (*JobServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "StartOfflineToOnlineIngestionJob", - Handler: _JobService_StartOfflineToOnlineIngestionJob_Handler, - }, - { - MethodName: "GetHistoricalFeatures", - Handler: _JobService_GetHistoricalFeatures_Handler, - }, - { - MethodName: "StartStreamToOnlineIngestionJob", - Handler: _JobService_StartStreamToOnlineIngestionJob_Handler, - }, - { - MethodName: "ListJobs", - Handler: _JobService_ListJobs_Handler, - }, - { - MethodName: "CancelJob", - Handler: _JobService_CancelJob_Handler, - }, - { - MethodName: "GetJob", - Handler: _JobService_GetJob_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/core/JobService.proto", -} diff --git a/sdk/go/protos/feast/core/Runner.pb.go b/sdk/go/protos/feast/core/Runner.pb.go deleted file mode 100644 index 763695f6c8..0000000000 --- a/sdk/go/protos/feast/core/Runner.pb.go +++ /dev/null @@ -1,467 +0,0 @@ -// -// * Copyright 2020 The Feast Authors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. 
-// * You may obtain a copy of the License at -// * -// * https://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/core/Runner.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type DirectRunnerConfigOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - //* - // Controls the amount of target parallelism the DirectRunner will use. - // Defaults to the greater of the number of available processors and 3. Must be a value - // greater than zero. - TargetParallelism int32 `protobuf:"varint,1,opt,name=targetParallelism,proto3" json:"targetParallelism,omitempty"` - // BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.PROJECT_ID - DeadLetterTableSpec string `protobuf:"bytes,2,opt,name=deadLetterTableSpec,proto3" json:"deadLetterTableSpec,omitempty"` - // A pipeline level default location for storing temporary files. - // Support Google Cloud Storage locations or local path - TempLocation string `protobuf:"bytes,3,opt,name=tempLocation,proto3" json:"tempLocation,omitempty"` -} - -func (x *DirectRunnerConfigOptions) Reset() { - *x = DirectRunnerConfigOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Runner_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DirectRunnerConfigOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DirectRunnerConfigOptions) ProtoMessage() {} - -func (x *DirectRunnerConfigOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Runner_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DirectRunnerConfigOptions.ProtoReflect.Descriptor instead. 
-func (*DirectRunnerConfigOptions) Descriptor() ([]byte, []int) { - return file_feast_core_Runner_proto_rawDescGZIP(), []int{0} -} - -func (x *DirectRunnerConfigOptions) GetTargetParallelism() int32 { - if x != nil { - return x.TargetParallelism - } - return 0 -} - -func (x *DirectRunnerConfigOptions) GetDeadLetterTableSpec() string { - if x != nil { - return x.DeadLetterTableSpec - } - return "" -} - -func (x *DirectRunnerConfigOptions) GetTempLocation() string { - if x != nil { - return x.TempLocation - } - return "" -} - -type DataflowRunnerConfigOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Project id to use when launching jobs. - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - // The Google Compute Engine region for creating Dataflow jobs. - Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"` - // GCP availability zone for operations. - WorkerZone string `protobuf:"bytes,3,opt,name=workerZone,proto3" json:"workerZone,omitempty"` - // Run the job as a specific service account, instead of the default GCE robot. - ServiceAccount string `protobuf:"bytes,4,opt,name=serviceAccount,proto3" json:"serviceAccount,omitempty"` - // GCE network for launching workers. - Network string `protobuf:"bytes,5,opt,name=network,proto3" json:"network,omitempty"` - // GCE subnetwork for launching workers. e.g. regions/asia-east1/subnetworks/mysubnetwork - Subnetwork string `protobuf:"bytes,6,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` - // Machine type to create Dataflow worker VMs as. - WorkerMachineType string `protobuf:"bytes,7,opt,name=workerMachineType,proto3" json:"workerMachineType,omitempty"` - // The autoscaling algorithm to use for the workerpool. - AutoscalingAlgorithm string `protobuf:"bytes,8,opt,name=autoscalingAlgorithm,proto3" json:"autoscalingAlgorithm,omitempty"` - // Specifies whether worker pools should be started with public IP addresses. - UsePublicIps bool `protobuf:"varint,9,opt,name=usePublicIps,proto3" json:"usePublicIps,omitempty"` - // A pipeline level default location for storing temporary files. Support Google Cloud Storage locations, - // e.g. gs://bucket/object - TempLocation string `protobuf:"bytes,10,opt,name=tempLocation,proto3" json:"tempLocation,omitempty"` - // The maximum number of workers to use for the workerpool. - MaxNumWorkers int32 `protobuf:"varint,11,opt,name=maxNumWorkers,proto3" json:"maxNumWorkers,omitempty"` - // BigQuery table specification, e.g. 
PROJECT_ID:DATASET_ID.PROJECT_ID - DeadLetterTableSpec string `protobuf:"bytes,12,opt,name=deadLetterTableSpec,proto3" json:"deadLetterTableSpec,omitempty"` - // Labels to apply to the dataflow job - Labels map[string]string `protobuf:"bytes,13,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Disk size to use on each remote Compute Engine worker instance - DiskSizeGb int32 `protobuf:"varint,14,opt,name=diskSizeGb,proto3" json:"diskSizeGb,omitempty"` - // Run job on Dataflow Streaming Engine instead of creating worker VMs - EnableStreamingEngine bool `protobuf:"varint,15,opt,name=enableStreamingEngine,proto3" json:"enableStreamingEngine,omitempty"` - // Type of persistent disk to be used by workers - WorkerDiskType string `protobuf:"bytes,16,opt,name=workerDiskType,proto3" json:"workerDiskType,omitempty"` - // Kafka consumer configuration properties - KafkaConsumerProperties map[string]string `protobuf:"bytes,17,rep,name=kafkaConsumerProperties,proto3" json:"kafkaConsumerProperties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *DataflowRunnerConfigOptions) Reset() { - *x = DataflowRunnerConfigOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Runner_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DataflowRunnerConfigOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DataflowRunnerConfigOptions) ProtoMessage() {} - -func (x *DataflowRunnerConfigOptions) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Runner_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DataflowRunnerConfigOptions.ProtoReflect.Descriptor instead. 
-func (*DataflowRunnerConfigOptions) Descriptor() ([]byte, []int) { - return file_feast_core_Runner_proto_rawDescGZIP(), []int{1} -} - -func (x *DataflowRunnerConfigOptions) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetRegion() string { - if x != nil { - return x.Region - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetWorkerZone() string { - if x != nil { - return x.WorkerZone - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetServiceAccount() string { - if x != nil { - return x.ServiceAccount - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetNetwork() string { - if x != nil { - return x.Network - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetSubnetwork() string { - if x != nil { - return x.Subnetwork - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetWorkerMachineType() string { - if x != nil { - return x.WorkerMachineType - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetAutoscalingAlgorithm() string { - if x != nil { - return x.AutoscalingAlgorithm - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetUsePublicIps() bool { - if x != nil { - return x.UsePublicIps - } - return false -} - -func (x *DataflowRunnerConfigOptions) GetTempLocation() string { - if x != nil { - return x.TempLocation - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetMaxNumWorkers() int32 { - if x != nil { - return x.MaxNumWorkers - } - return 0 -} - -func (x *DataflowRunnerConfigOptions) GetDeadLetterTableSpec() string { - if x != nil { - return x.DeadLetterTableSpec - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetLabels() map[string]string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *DataflowRunnerConfigOptions) GetDiskSizeGb() int32 { - if x != nil { - return x.DiskSizeGb - } - return 0 -} - -func (x *DataflowRunnerConfigOptions) GetEnableStreamingEngine() bool { - if x != nil { - return x.EnableStreamingEngine - } - return false -} - -func (x *DataflowRunnerConfigOptions) GetWorkerDiskType() string { - if x != nil { - return x.WorkerDiskType - } - return "" -} - -func (x *DataflowRunnerConfigOptions) GetKafkaConsumerProperties() map[string]string { - if x != nil { - return x.KafkaConsumerProperties - } - return nil -} - -var File_feast_core_Runner_proto protoreflect.FileDescriptor - -var file_feast_core_Runner_proto_rawDesc = []byte{ - 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x52, 0x75, 0x6e, - 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x9f, 0x01, 0x0a, 0x19, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, - 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x2c, 0x0a, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x61, 0x72, - 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, - 0x6d, 0x12, 0x30, 0x0a, 0x13, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, - 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, - 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, - 0x70, 0x65, 0x63, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, 0x6f, 0x63, 0x61, 0x74, - 0x69, 
0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, - 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xa5, 0x07, 0x0a, 0x1b, 0x44, 0x61, 0x74, 0x61, - 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, - 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x06, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, - 0x6b, 0x65, 0x72, 0x5a, 0x6f, 0x6e, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5a, 0x6f, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x73, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, - 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x12, 0x1e, 0x0a, 0x0a, 0x73, - 0x75, 0x62, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x73, 0x75, 0x62, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x12, 0x2c, 0x0a, 0x11, 0x77, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4d, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4d, 0x61, - 0x63, 0x68, 0x69, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x32, 0x0a, 0x14, 0x61, 0x75, 0x74, - 0x6f, 0x73, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, - 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x63, 0x61, - 0x6c, 0x69, 0x6e, 0x67, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x12, 0x22, 0x0a, - 0x0c, 0x75, 0x73, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x70, 0x73, 0x18, 0x09, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x0c, 0x75, 0x73, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x70, - 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, 0x6f, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x4e, 0x75, 0x6d, 0x57, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x6d, 0x61, - 0x78, 0x4e, 0x75, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x12, 0x30, 0x0a, 0x13, 0x64, - 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, - 0x65, 0x63, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, - 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x4b, 0x0a, - 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x66, - 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x69, - 0x73, 0x6b, 0x53, 0x69, 0x7a, 
0x65, 0x47, 0x62, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, - 0x64, 0x69, 0x73, 0x6b, 0x53, 0x69, 0x7a, 0x65, 0x47, 0x62, 0x12, 0x34, 0x0a, 0x15, 0x65, 0x6e, - 0x61, 0x62, 0x6c, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x67, - 0x69, 0x6e, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x15, 0x65, 0x6e, 0x61, 0x62, 0x6c, - 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x67, 0x69, 0x6e, 0x65, - 0x12, 0x26, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x44, 0x69, 0x73, 0x6b, 0x54, 0x79, - 0x70, 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, - 0x44, 0x69, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x7e, 0x0a, 0x17, 0x6b, 0x61, 0x66, 0x6b, - 0x61, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x69, 0x65, 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x44, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x17, 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x50, 0x72, - 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x1a, 0x4a, 0x0a, 0x1c, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x43, 0x6f, 0x6e, 0x73, - 0x75, 0x6d, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, - 0x54, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x42, 0x0b, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, - 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, - 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_Runner_proto_rawDescOnce sync.Once - file_feast_core_Runner_proto_rawDescData = file_feast_core_Runner_proto_rawDesc -) - -func file_feast_core_Runner_proto_rawDescGZIP() []byte { - file_feast_core_Runner_proto_rawDescOnce.Do(func() { - file_feast_core_Runner_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Runner_proto_rawDescData) - }) - return file_feast_core_Runner_proto_rawDescData -} - -var file_feast_core_Runner_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_feast_core_Runner_proto_goTypes = []interface{}{ - (*DirectRunnerConfigOptions)(nil), // 0: feast.core.DirectRunnerConfigOptions - 
(*DataflowRunnerConfigOptions)(nil), // 1: feast.core.DataflowRunnerConfigOptions - nil, // 2: feast.core.DataflowRunnerConfigOptions.LabelsEntry - nil, // 3: feast.core.DataflowRunnerConfigOptions.KafkaConsumerPropertiesEntry -} -var file_feast_core_Runner_proto_depIdxs = []int32{ - 2, // 0: feast.core.DataflowRunnerConfigOptions.labels:type_name -> feast.core.DataflowRunnerConfigOptions.LabelsEntry - 3, // 1: feast.core.DataflowRunnerConfigOptions.kafkaConsumerProperties:type_name -> feast.core.DataflowRunnerConfigOptions.KafkaConsumerPropertiesEntry - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_feast_core_Runner_proto_init() } -func file_feast_core_Runner_proto_init() { - if File_feast_core_Runner_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_Runner_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DirectRunnerConfigOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Runner_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DataflowRunnerConfigOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_Runner_proto_rawDesc, - NumEnums: 0, - NumMessages: 4, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_Runner_proto_goTypes, - DependencyIndexes: file_feast_core_Runner_proto_depIdxs, - MessageInfos: file_feast_core_Runner_proto_msgTypes, - }.Build() - File_feast_core_Runner_proto = out.File - file_feast_core_Runner_proto_rawDesc = nil - file_feast_core_Runner_proto_goTypes = nil - file_feast_core_Runner_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/Source.pb.go b/sdk/go/protos/feast/core/Source.pb.go deleted file mode 100644 index af7f9783bb..0000000000 --- a/sdk/go/protos/feast/core/Source.pb.go +++ /dev/null @@ -1,360 +0,0 @@ -// -// * Copyright 2019 The Feast Authors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * https://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/core/Source.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
-	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
-	// Verify that runtime/protoimpl is sufficiently up-to-date.
-	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
-)
-
-// This is a compile-time assertion that a sufficiently up-to-date version
-// of the legacy proto package is being used.
-const _ = proto.ProtoPackageIsVersion4
-
-type SourceType int32
-
-const (
-	SourceType_INVALID SourceType = 0
-	SourceType_KAFKA   SourceType = 1
-)
-
-// Enum value maps for SourceType.
-var (
-	SourceType_name = map[int32]string{
-		0: "INVALID",
-		1: "KAFKA",
-	}
-	SourceType_value = map[string]int32{
-		"INVALID": 0,
-		"KAFKA":   1,
-	}
-)
-
-func (x SourceType) Enum() *SourceType {
-	p := new(SourceType)
-	*p = x
-	return p
-}
-
-func (x SourceType) String() string {
-	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
-}
-
-func (SourceType) Descriptor() protoreflect.EnumDescriptor {
-	return file_feast_core_Source_proto_enumTypes[0].Descriptor()
-}
-
-func (SourceType) Type() protoreflect.EnumType {
-	return &file_feast_core_Source_proto_enumTypes[0]
-}
-
-func (x SourceType) Number() protoreflect.EnumNumber {
-	return protoreflect.EnumNumber(x)
-}
-
-// Deprecated: Use SourceType.Descriptor instead.
-func (SourceType) EnumDescriptor() ([]byte, []int) {
-	return file_feast_core_Source_proto_rawDescGZIP(), []int{0}
-}
-
-type Source struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// The kind of data source Feast should connect to in order to retrieve FeatureRow values
-	Type SourceType `protobuf:"varint,1,opt,name=type,proto3,enum=feast.core.SourceType" json:"type,omitempty"`
-	// Source-specific configuration
-	//
-	// Types that are assignable to SourceConfig:
-	//	*Source_KafkaSourceConfig
-	SourceConfig isSource_SourceConfig `protobuf_oneof:"source_config"`
-}
-
-func (x *Source) Reset() {
-	*x = Source{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_core_Source_proto_msgTypes[0]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *Source) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Source) ProtoMessage() {}
-
-func (x *Source) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_core_Source_proto_msgTypes[0]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use Source.ProtoReflect.Descriptor instead.
-func (*Source) Descriptor() ([]byte, []int) {
-	return file_feast_core_Source_proto_rawDescGZIP(), []int{0}
-}
-
-func (x *Source) GetType() SourceType {
-	if x != nil {
-		return x.Type
-	}
-	return SourceType_INVALID
-}
-
-func (m *Source) GetSourceConfig() isSource_SourceConfig {
-	if m != nil {
-		return m.SourceConfig
-	}
-	return nil
-}
-
-func (x *Source) GetKafkaSourceConfig() *KafkaSourceConfig {
-	if x, ok := x.GetSourceConfig().(*Source_KafkaSourceConfig); ok {
-		return x.KafkaSourceConfig
-	}
-	return nil
-}
-
-type isSource_SourceConfig interface {
-	isSource_SourceConfig()
-}
-
-type Source_KafkaSourceConfig struct {
-	KafkaSourceConfig *KafkaSourceConfig `protobuf:"bytes,2,opt,name=kafka_source_config,json=kafkaSourceConfig,proto3,oneof"`
-}
-
-func (*Source_KafkaSourceConfig) isSource_SourceConfig() {}
-
-type KafkaSourceConfig struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Comma-separated list of Kafka bootstrap servers (host[:port]). Used for feature sets without a defined source.
-	BootstrapServers string `protobuf:"bytes,1,opt,name=bootstrap_servers,json=bootstrapServers,proto3" json:"bootstrap_servers,omitempty"`
-	// Kafka topic to use for feature sets without user-defined topics.
-	Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"`
-	// Number of Kafka partitions to use for the managed feature stream.
-	Partitions int32 `protobuf:"varint,3,opt,name=partitions,proto3" json:"partitions,omitempty"`
-	// Defines the number of copies of the managed feature stream in Kafka.
-	ReplicationFactor int32 `protobuf:"varint,4,opt,name=replicationFactor,proto3" json:"replicationFactor,omitempty"`
-}
-
-func (x *KafkaSourceConfig) Reset() {
-	*x = KafkaSourceConfig{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_core_Source_proto_msgTypes[1]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *KafkaSourceConfig) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*KafkaSourceConfig) ProtoMessage() {}
-
-func (x *KafkaSourceConfig) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_core_Source_proto_msgTypes[1]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use KafkaSourceConfig.ProtoReflect.Descriptor instead.
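// EDITOR'S NOTE: the following is an illustrative sketch added for this review,
// not part of the deleted generated file above. It shows how the removed
// feast.core Source API was typically constructed: the source_config oneof is
// populated through the generated Source_KafkaSourceConfig wrapper. The broker
// addresses, topic name, and counts are assumptions for the example.
package main

import (
	"fmt"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

func main() {
	src := &core.Source{
		Type: core.SourceType_KAFKA,
		// The oneof field is set via its generated wrapper type.
		SourceConfig: &core.Source_KafkaSourceConfig{
			KafkaSourceConfig: &core.KafkaSourceConfig{
				BootstrapServers:  "kafka-1:9092,kafka-2:9092", // comma-separated host[:port] pairs (assumed hosts)
				Topic:             "feast-features",            // assumed topic name
				Partitions:        3,
				ReplicationFactor: 2,
			},
		},
	}
	// GetKafkaSourceConfig returns nil unless the Kafka variant of the oneof is set.
	if cfg := src.GetKafkaSourceConfig(); cfg != nil {
		fmt.Println(cfg.GetBootstrapServers(), cfg.GetTopic())
	}
}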
-func (*KafkaSourceConfig) Descriptor() ([]byte, []int) { - return file_feast_core_Source_proto_rawDescGZIP(), []int{1} -} - -func (x *KafkaSourceConfig) GetBootstrapServers() string { - if x != nil { - return x.BootstrapServers - } - return "" -} - -func (x *KafkaSourceConfig) GetTopic() string { - if x != nil { - return x.Topic - } - return "" -} - -func (x *KafkaSourceConfig) GetPartitions() int32 { - if x != nil { - return x.Partitions - } - return 0 -} - -func (x *KafkaSourceConfig) GetReplicationFactor() int32 { - if x != nil { - return x.ReplicationFactor - } - return 0 -} - -var File_feast_core_Source_proto protoreflect.FileDescriptor - -var file_feast_core_Source_proto_rawDesc = []byte{ - 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x96, 0x01, 0x0a, 0x06, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x4f, 0x0a, 0x13, - 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x11, 0x6b, 0x61, 0x66, 0x6b, - 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x0f, 0x0a, - 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xa4, - 0x01, 0x0a, 0x11, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, - 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x74, 0x69, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x61, 0x72, - 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2c, 0x0a, 0x11, 0x72, 0x65, 0x70, 0x6c, 0x69, - 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x11, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, - 0x61, 0x63, 0x74, 0x6f, 0x72, 0x2a, 0x24, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, - 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, - 0x12, 0x09, 0x0a, 0x05, 0x4b, 0x41, 0x46, 0x4b, 0x41, 0x10, 0x01, 0x42, 0x54, 0x0a, 0x10, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, - 0x0b, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, - 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, - 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, - 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_Source_proto_rawDescOnce sync.Once - file_feast_core_Source_proto_rawDescData = file_feast_core_Source_proto_rawDesc -) - -func file_feast_core_Source_proto_rawDescGZIP() []byte { - file_feast_core_Source_proto_rawDescOnce.Do(func() { - file_feast_core_Source_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Source_proto_rawDescData) - }) - return file_feast_core_Source_proto_rawDescData -} - -var file_feast_core_Source_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_Source_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_feast_core_Source_proto_goTypes = []interface{}{ - (SourceType)(0), // 0: feast.core.SourceType - (*Source)(nil), // 1: feast.core.Source - (*KafkaSourceConfig)(nil), // 2: feast.core.KafkaSourceConfig -} -var file_feast_core_Source_proto_depIdxs = []int32{ - 0, // 0: feast.core.Source.type:type_name -> feast.core.SourceType - 2, // 1: feast.core.Source.kafka_source_config:type_name -> feast.core.KafkaSourceConfig - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_feast_core_Source_proto_init() } -func file_feast_core_Source_proto_init() { - if File_feast_core_Source_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_Source_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Source); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Source_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*KafkaSourceConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_Source_proto_msgTypes[0].OneofWrappers = []interface{}{ - (*Source_KafkaSourceConfig)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_Source_proto_rawDesc, - NumEnums: 1, - NumMessages: 2, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_Source_proto_goTypes, - DependencyIndexes: file_feast_core_Source_proto_depIdxs, - EnumInfos: file_feast_core_Source_proto_enumTypes, - MessageInfos: file_feast_core_Source_proto_msgTypes, - }.Build() - File_feast_core_Source_proto = out.File - file_feast_core_Source_proto_rawDesc = nil - file_feast_core_Source_proto_goTypes = nil - file_feast_core_Source_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/core/Store.pb.go b/sdk/go/protos/feast/core/Store.pb.go deleted file mode 100644 index 9498c6a40c..0000000000 --- a/sdk/go/protos/feast/core/Store.pb.go +++ /dev/null @@ -1,675 +0,0 @@ -// -// * Copyright 2019 The Feast Authors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. 
-// * You may obtain a copy of the License at -// * -// * https://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/core/Store.proto - -package core - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type Store_StoreType int32 - -const ( - Store_INVALID Store_StoreType = 0 - // Redis stores a FeatureRow element as a key, value pair. - // - // The Redis data types used (https://redis.io/topics/data-types): - // - key: STRING - // - value: STRING - // - // Encodings: - // - key: byte array of RedisKey (refer to feast.storage.RedisKeyV2) - // - value: Redis hashmap - // - Store_REDIS Store_StoreType = 1 - Store_REDIS_CLUSTER Store_StoreType = 4 -) - -// Enum value maps for Store_StoreType. -var ( - Store_StoreType_name = map[int32]string{ - 0: "INVALID", - 1: "REDIS", - 4: "REDIS_CLUSTER", - } - Store_StoreType_value = map[string]int32{ - "INVALID": 0, - "REDIS": 1, - "REDIS_CLUSTER": 4, - } -) - -func (x Store_StoreType) Enum() *Store_StoreType { - p := new(Store_StoreType) - *p = x - return p -} - -func (x Store_StoreType) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (Store_StoreType) Descriptor() protoreflect.EnumDescriptor { - return file_feast_core_Store_proto_enumTypes[0].Descriptor() -} - -func (Store_StoreType) Type() protoreflect.EnumType { - return &file_feast_core_Store_proto_enumTypes[0] -} - -func (x Store_StoreType) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use Store_StoreType.Descriptor instead. -func (Store_StoreType) EnumDescriptor() ([]byte, []int) { - return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 0} -} - -// Store provides a location where Feast reads and writes feature values. -// Feature values will be written to the Store in the form of FeatureRow elements. -// The way FeatureRow is encoded and decoded when it is written to and read from -// the Store depends on the type of the Store. -// -type Store struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the store. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Type of store. - Type Store_StoreType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.core.Store_StoreType" json:"type,omitempty"` - // Feature sets to subscribe to. 
-	Subscriptions []*Store_Subscription `protobuf:"bytes,4,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"`
-	// Configuration to connect to the store. Required.
-	//
-	// Types that are assignable to Config:
-	//	*Store_RedisConfig_
-	//	*Store_RedisClusterConfig_
-	Config isStore_Config `protobuf_oneof:"config"`
-}
-
-func (x *Store) Reset() {
-	*x = Store{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_core_Store_proto_msgTypes[0]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *Store) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Store) ProtoMessage() {}
-
-func (x *Store) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_core_Store_proto_msgTypes[0]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use Store.ProtoReflect.Descriptor instead.
-func (*Store) Descriptor() ([]byte, []int) {
-	return file_feast_core_Store_proto_rawDescGZIP(), []int{0}
-}
-
-func (x *Store) GetName() string {
-	if x != nil {
-		return x.Name
-	}
-	return ""
-}
-
-func (x *Store) GetType() Store_StoreType {
-	if x != nil {
-		return x.Type
-	}
-	return Store_INVALID
-}
-
-func (x *Store) GetSubscriptions() []*Store_Subscription {
-	if x != nil {
-		return x.Subscriptions
-	}
-	return nil
-}
-
-func (m *Store) GetConfig() isStore_Config {
-	if m != nil {
-		return m.Config
-	}
-	return nil
-}
-
-func (x *Store) GetRedisConfig() *Store_RedisConfig {
-	if x, ok := x.GetConfig().(*Store_RedisConfig_); ok {
-		return x.RedisConfig
-	}
-	return nil
-}
-
-func (x *Store) GetRedisClusterConfig() *Store_RedisClusterConfig {
-	if x, ok := x.GetConfig().(*Store_RedisClusterConfig_); ok {
-		return x.RedisClusterConfig
-	}
-	return nil
-}
-
-type isStore_Config interface {
-	isStore_Config()
-}
-
-type Store_RedisConfig_ struct {
-	RedisConfig *Store_RedisConfig `protobuf:"bytes,11,opt,name=redis_config,json=redisConfig,proto3,oneof"`
-}
-
-type Store_RedisClusterConfig_ struct {
-	RedisClusterConfig *Store_RedisClusterConfig `protobuf:"bytes,14,opt,name=redis_cluster_config,json=redisClusterConfig,proto3,oneof"`
-}
-
-func (*Store_RedisConfig_) isStore_Config() {}
-
-func (*Store_RedisClusterConfig_) isStore_Config() {}
-
-type Store_RedisConfig struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	Host string `protobuf:"bytes,1,opt,name=host,proto3" json:"host,omitempty"`
-	Port int32  `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"`
-	// Optional. The number of milliseconds to wait before retrying a failed Redis connection.
-	// By default, Feast uses an exponential backoff policy and "initial_backoff_ms" sets the initial wait duration.
-	InitialBackoffMs int32 `protobuf:"varint,3,opt,name=initial_backoff_ms,json=initialBackoffMs,proto3" json:"initial_backoff_ms,omitempty"`
-	// Optional. Maximum total number of retries for connecting to Redis. Defaults to zero retries.
-	MaxRetries int32 `protobuf:"varint,4,opt,name=max_retries,json=maxRetries,proto3" json:"max_retries,omitempty"`
-	// Optional. How often to flush data to Redis.
-	FlushFrequencySeconds int32 `protobuf:"varint,5,opt,name=flush_frequency_seconds,json=flushFrequencySeconds,proto3" json:"flush_frequency_seconds,omitempty"`
-	// Optional. Connect over SSL.
-	Ssl bool `protobuf:"varint,6,opt,name=ssl,proto3" json:"ssl,omitempty"`
-}
-
-func (x *Store_RedisConfig) Reset() {
-	*x = Store_RedisConfig{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_core_Store_proto_msgTypes[1]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *Store_RedisConfig) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Store_RedisConfig) ProtoMessage() {}
-
-func (x *Store_RedisConfig) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_core_Store_proto_msgTypes[1]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use Store_RedisConfig.ProtoReflect.Descriptor instead.
-func (*Store_RedisConfig) Descriptor() ([]byte, []int) {
-	return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 0}
-}
-
-func (x *Store_RedisConfig) GetHost() string {
-	if x != nil {
-		return x.Host
-	}
-	return ""
-}
-
-func (x *Store_RedisConfig) GetPort() int32 {
-	if x != nil {
-		return x.Port
-	}
-	return 0
-}
-
-func (x *Store_RedisConfig) GetInitialBackoffMs() int32 {
-	if x != nil {
-		return x.InitialBackoffMs
-	}
-	return 0
-}
-
-func (x *Store_RedisConfig) GetMaxRetries() int32 {
-	if x != nil {
-		return x.MaxRetries
-	}
-	return 0
-}
-
-func (x *Store_RedisConfig) GetFlushFrequencySeconds() int32 {
-	if x != nil {
-		return x.FlushFrequencySeconds
-	}
-	return 0
-}
-
-func (x *Store_RedisConfig) GetSsl() bool {
-	if x != nil {
-		return x.Ssl
-	}
-	return false
-}
-
-type Store_RedisClusterConfig struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Comma-separated list of Redis URIs for all the nodes in the Redis Cluster, e.g. host1:6379,host2:6379
-	ConnectionString string `protobuf:"bytes,1,opt,name=connection_string,json=connectionString,proto3" json:"connection_string,omitempty"`
-	InitialBackoffMs int32  `protobuf:"varint,2,opt,name=initial_backoff_ms,json=initialBackoffMs,proto3" json:"initial_backoff_ms,omitempty"`
-	MaxRetries       int32  `protobuf:"varint,3,opt,name=max_retries,json=maxRetries,proto3" json:"max_retries,omitempty"`
-	// Optional. How often to flush data to Redis.
-	FlushFrequencySeconds int32 `protobuf:"varint,4,opt,name=flush_frequency_seconds,json=flushFrequencySeconds,proto3" json:"flush_frequency_seconds,omitempty"`
-	// Optional. Append a prefix to the Redis key.
-	KeyPrefix string `protobuf:"bytes,5,opt,name=key_prefix,json=keyPrefix,proto3" json:"key_prefix,omitempty"`
-	// Optional. Enable fallback to another key prefix if the original key is not present.
-	// Useful for migrating a key prefix without re-ingestion. Disabled by default.
-	EnableFallback bool `protobuf:"varint,6,opt,name=enable_fallback,json=enableFallback,proto3" json:"enable_fallback,omitempty"`
-	// Optional. The fallback prefix to use if enable_fallback is true.
-	FallbackPrefix string `protobuf:"bytes,7,opt,name=fallback_prefix,json=fallbackPrefix,proto3" json:"fallback_prefix,omitempty"`
-}
-
-func (x *Store_RedisClusterConfig) Reset() {
-	*x = Store_RedisClusterConfig{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_core_Store_proto_msgTypes[2]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *Store_RedisClusterConfig) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Store_RedisClusterConfig) ProtoMessage() {}
-
-func (x *Store_RedisClusterConfig) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_core_Store_proto_msgTypes[2]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use Store_RedisClusterConfig.ProtoReflect.Descriptor instead.
-func (*Store_RedisClusterConfig) Descriptor() ([]byte, []int) {
-	return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 1}
-}
-
-func (x *Store_RedisClusterConfig) GetConnectionString() string {
-	if x != nil {
-		return x.ConnectionString
-	}
-	return ""
-}
-
-func (x *Store_RedisClusterConfig) GetInitialBackoffMs() int32 {
-	if x != nil {
-		return x.InitialBackoffMs
-	}
-	return 0
-}
-
-func (x *Store_RedisClusterConfig) GetMaxRetries() int32 {
-	if x != nil {
-		return x.MaxRetries
-	}
-	return 0
-}
-
-func (x *Store_RedisClusterConfig) GetFlushFrequencySeconds() int32 {
-	if x != nil {
-		return x.FlushFrequencySeconds
-	}
-	return 0
-}
-
-func (x *Store_RedisClusterConfig) GetKeyPrefix() string {
-	if x != nil {
-		return x.KeyPrefix
-	}
-	return ""
-}
-
-func (x *Store_RedisClusterConfig) GetEnableFallback() bool {
-	if x != nil {
-		return x.EnableFallback
-	}
-	return false
-}
-
-func (x *Store_RedisClusterConfig) GetFallbackPrefix() string {
-	if x != nil {
-		return x.FallbackPrefix
-	}
-	return ""
-}
-
-type Store_Subscription struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Name of the project that the feature sets belong to. This can be one of
-	// - [project_name]
-	// - *
-	// If an asterisk is provided, filtering on projects will be disabled. All projects will
-	// be matched. It is NOT possible to provide an asterisk with a string in order to do
-	// pattern matching.
-	Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"`
-	// Name of the desired feature set. Asterisks can be used as wildcards in the name.
-	// Matching on names is only permitted if a specific project is defined. It is disallowed
-	// if the project name is set to "*",
-	// e.g.
- // - * can be used to match all feature sets - // - my-feature-set* can be used to match all features prefixed by "my-feature-set" - // - my-feature-set-6 can be used to select a single feature set - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // All matches with exclude enabled will be filtered out instead of added - Exclude bool `protobuf:"varint,4,opt,name=exclude,proto3" json:"exclude,omitempty"` -} - -func (x *Store_Subscription) Reset() { - *x = Store_Subscription{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Store_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Store_Subscription) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Store_Subscription) ProtoMessage() {} - -func (x *Store_Subscription) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Store_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Store_Subscription.ProtoReflect.Descriptor instead. -func (*Store_Subscription) Descriptor() ([]byte, []int) { - return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 2} -} - -func (x *Store_Subscription) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *Store_Subscription) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *Store_Subscription) GetExclude() bool { - if x != nil { - return x.Exclude - } - return false -} - -var File_feast_core_Store_proto protoreflect.FileDescriptor - -var file_feast_core_Store_proto_rawDesc = []byte{ - 0x0a, 0x16, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x74, 0x6f, - 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x22, 0xf5, 0x07, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, - 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x12, 0x44, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x75, - 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x73, 0x75, 0x62, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x42, 0x0a, 0x0c, 0x72, 0x65, 0x64, - 0x69, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, - 0x72, 0x65, 0x2e, 0x52, 0x65, 0x64, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, - 0x52, 0x0b, 0x72, 0x65, 0x64, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x58, 0x0a, - 0x14, 0x72, 0x65, 0x64, 0x69, 0x73, 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x65, - 
0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, - 0x65, 0x64, 0x69, 0x73, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x48, 0x00, 0x52, 0x12, 0x72, 0x65, 0x64, 0x69, 0x73, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, - 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0xce, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x64, 0x69, - 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, - 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x12, - 0x2c, 0x0a, 0x12, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, - 0x66, 0x66, 0x5f, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, 0x69, 0x6e, 0x69, - 0x74, 0x69, 0x61, 0x6c, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x73, 0x12, 0x1f, 0x0a, - 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x0a, 0x6d, 0x61, 0x78, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x36, - 0x0a, 0x17, 0x66, 0x6c, 0x75, 0x73, 0x68, 0x5f, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, - 0x79, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x15, 0x66, 0x6c, 0x75, 0x73, 0x68, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x53, - 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x73, 0x6c, 0x18, 0x06, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x03, 0x73, 0x73, 0x6c, 0x1a, 0xb9, 0x02, 0x0a, 0x12, 0x52, 0x65, 0x64, - 0x69, 0x73, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x2c, 0x0a, 0x12, - 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, - 0x6d, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, - 0x6c, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, - 0x78, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x0a, 0x6d, 0x61, 0x78, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x36, 0x0a, 0x17, 0x66, - 0x6c, 0x75, 0x73, 0x68, 0x5f, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x73, - 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x15, 0x66, 0x6c, - 0x75, 0x73, 0x68, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x53, 0x65, 0x63, 0x6f, - 0x6e, 0x64, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x6b, 0x65, 0x79, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, - 0x78, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6b, 0x65, 0x79, 0x50, 0x72, 0x65, 0x66, - 0x69, 0x78, 0x12, 0x27, 0x0a, 0x0f, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x66, 0x61, 0x6c, - 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x65, 0x6e, 0x61, - 0x62, 0x6c, 0x65, 0x46, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x27, 0x0a, 0x0f, 0x66, - 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x66, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x50, 0x72, - 0x65, 0x66, 0x69, 0x78, 
0x1a, 0x5c, 0x0a, 0x0c, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x4a, 0x04, 0x08, 0x02, - 0x10, 0x03, 0x22, 0x4e, 0x0a, 0x09, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, - 0x52, 0x45, 0x44, 0x49, 0x53, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x52, 0x45, 0x44, 0x49, 0x53, - 0x5f, 0x43, 0x4c, 0x55, 0x53, 0x54, 0x45, 0x52, 0x10, 0x04, 0x22, 0x04, 0x08, 0x02, 0x10, 0x02, - 0x22, 0x04, 0x08, 0x03, 0x10, 0x03, 0x22, 0x04, 0x08, 0x0c, 0x10, 0x0c, 0x22, 0x04, 0x08, 0x0d, - 0x10, 0x0d, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x53, 0x0a, 0x10, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x42, 0x0a, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, - 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, - 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_core_Store_proto_rawDescOnce sync.Once - file_feast_core_Store_proto_rawDescData = file_feast_core_Store_proto_rawDesc -) - -func file_feast_core_Store_proto_rawDescGZIP() []byte { - file_feast_core_Store_proto_rawDescOnce.Do(func() { - file_feast_core_Store_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Store_proto_rawDescData) - }) - return file_feast_core_Store_proto_rawDescData -} - -var file_feast_core_Store_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_Store_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_feast_core_Store_proto_goTypes = []interface{}{ - (Store_StoreType)(0), // 0: feast.core.Store.StoreType - (*Store)(nil), // 1: feast.core.Store - (*Store_RedisConfig)(nil), // 2: feast.core.Store.RedisConfig - (*Store_RedisClusterConfig)(nil), // 3: feast.core.Store.RedisClusterConfig - (*Store_Subscription)(nil), // 4: feast.core.Store.Subscription -} -var file_feast_core_Store_proto_depIdxs = []int32{ - 0, // 0: feast.core.Store.type:type_name -> feast.core.Store.StoreType - 4, // 1: feast.core.Store.subscriptions:type_name -> feast.core.Store.Subscription - 2, // 2: feast.core.Store.redis_config:type_name -> feast.core.Store.RedisConfig - 3, // 3: feast.core.Store.redis_cluster_config:type_name -> feast.core.Store.RedisClusterConfig - 4, // [4:4] is the sub-list for method output_type - 4, // [4:4] is the sub-list for method input_type - 4, // [4:4] is the sub-list for extension type_name - 4, // [4:4] is the sub-list for extension extendee - 0, // [0:4] is the sub-list for field type_name -} - -func init() { file_feast_core_Store_proto_init() } -func file_feast_core_Store_proto_init() { - if File_feast_core_Store_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_core_Store_proto_msgTypes[0].Exporter = func(v 
interface{}, i int) interface{} { - switch v := v.(*Store); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Store_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Store_RedisConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Store_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Store_RedisClusterConfig); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_core_Store_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Store_Subscription); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_core_Store_proto_msgTypes[0].OneofWrappers = []interface{}{ - (*Store_RedisConfig_)(nil), - (*Store_RedisClusterConfig_)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_core_Store_proto_rawDesc, - NumEnums: 1, - NumMessages: 4, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_core_Store_proto_goTypes, - DependencyIndexes: file_feast_core_Store_proto_depIdxs, - EnumInfos: file_feast_core_Store_proto_enumTypes, - MessageInfos: file_feast_core_Store_proto_msgTypes, - }.Build() - File_feast_core_Store_proto = out.File - file_feast_core_Store_proto_rawDesc = nil - file_feast_core_Store_proto_goTypes = nil - file_feast_core_Store_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/serving/ServingService.pb.go b/sdk/go/protos/feast/serving/ServingService.pb.go deleted file mode 100644 index 7d40fa45d5..0000000000 --- a/sdk/go/protos/feast/serving/ServingService.pb.go +++ /dev/null @@ -1,973 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/serving/ServingService.proto - -package serving - -import ( - context "context" - types "github.com/feast-dev/feast/sdk/go/protos/feast/types" - _ "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
-	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
-	// Verify that runtime/protoimpl is sufficiently up-to-date.
-	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
-)
-
-// This is a compile-time assertion that a sufficiently up-to-date version
-// of the legacy proto package is being used.
-const _ = proto.ProtoPackageIsVersion4
-
-type FeastServingType int32
-
-const (
-	FeastServingType_FEAST_SERVING_TYPE_INVALID FeastServingType = 0
-	// Online serving receives entity data directly and synchronously and will
-	// respond immediately.
-	FeastServingType_FEAST_SERVING_TYPE_ONLINE FeastServingType = 1
-	// Batch serving receives entity data asynchronously and orchestrates the
-	// retrieval through a staging location.
-	FeastServingType_FEAST_SERVING_TYPE_BATCH FeastServingType = 2
-)
-
-// Enum value maps for FeastServingType.
-var (
-	FeastServingType_name = map[int32]string{
-		0: "FEAST_SERVING_TYPE_INVALID",
-		1: "FEAST_SERVING_TYPE_ONLINE",
-		2: "FEAST_SERVING_TYPE_BATCH",
-	}
-	FeastServingType_value = map[string]int32{
-		"FEAST_SERVING_TYPE_INVALID": 0,
-		"FEAST_SERVING_TYPE_ONLINE":  1,
-		"FEAST_SERVING_TYPE_BATCH":   2,
-	}
-)
-
-func (x FeastServingType) Enum() *FeastServingType {
-	p := new(FeastServingType)
-	*p = x
-	return p
-}
-
-func (x FeastServingType) String() string {
-	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
-}
-
-func (FeastServingType) Descriptor() protoreflect.EnumDescriptor {
-	return file_feast_serving_ServingService_proto_enumTypes[0].Descriptor()
-}
-
-func (FeastServingType) Type() protoreflect.EnumType {
-	return &file_feast_serving_ServingService_proto_enumTypes[0]
-}
-
-func (x FeastServingType) Number() protoreflect.EnumNumber {
-	return protoreflect.EnumNumber(x)
-}
-
-// Deprecated: Use FeastServingType.Descriptor instead.
-func (FeastServingType) EnumDescriptor() ([]byte, []int) {
-	return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{0}
-}
-
-type GetOnlineFeaturesResponse_FieldStatus int32
-
-const (
-	// Status is unset for this field.
-	GetOnlineFeaturesResponse_INVALID GetOnlineFeaturesResponse_FieldStatus = 0
-	// Field value is present for this field and age is within max age.
-	GetOnlineFeaturesResponse_PRESENT GetOnlineFeaturesResponse_FieldStatus = 1
-	// Values could be found for the entity key and age is within max age, but
-	// this field was assigned a null value on ingestion into Feast.
-	GetOnlineFeaturesResponse_NULL_VALUE GetOnlineFeaturesResponse_FieldStatus = 2
-	// Entity key did not return any values as they do not exist in Feast.
-	// This could suggest that the feature values have not yet been ingested
-	// into Feast or that the ingestion failed.
-	GetOnlineFeaturesResponse_NOT_FOUND GetOnlineFeaturesResponse_FieldStatus = 3
-	// Values could be found for the entity key, but the field values are older
-	// than the maximum allowable age.
-	GetOnlineFeaturesResponse_OUTSIDE_MAX_AGE GetOnlineFeaturesResponse_FieldStatus = 4
-)
-
-// Enum value maps for GetOnlineFeaturesResponse_FieldStatus.
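// EDITOR'S NOTE: the following is an illustrative sketch added for this review,
// not part of the deleted generated file above. It shows how a caller would
// typically act on the per-field statuses documented on FieldStatus: only
// PRESENT fields carry usable values; the other statuses are treated as
// missing data. The helper name and the empty-response demo are assumptions.
package main

import (
	"fmt"

	serving "github.com/feast-dev/feast/sdk/go/protos/feast/serving"
)

// usableValues keeps only the field names whose status is PRESENT, skipping
// NULL_VALUE, NOT_FOUND, OUTSIDE_MAX_AGE, and INVALID entries.
func usableValues(resp *serving.GetOnlineFeaturesResponse) []string {
	var usable []string
	for _, row := range resp.GetFieldValues() {
		statuses := row.GetStatuses()
		for name := range row.GetFields() {
			if statuses[name] == serving.GetOnlineFeaturesResponse_PRESENT {
				usable = append(usable, name)
			} else {
				fmt.Printf("skipping %s: status %s\n", name, statuses[name])
			}
		}
	}
	return usable
}

func main() {
	// An empty response also demonstrates the nil-safety of the generated getters.
	fmt.Println(usableValues(&serving.GetOnlineFeaturesResponse{}))
}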
-var ( - GetOnlineFeaturesResponse_FieldStatus_name = map[int32]string{ - 0: "INVALID", - 1: "PRESENT", - 2: "NULL_VALUE", - 3: "NOT_FOUND", - 4: "OUTSIDE_MAX_AGE", - } - GetOnlineFeaturesResponse_FieldStatus_value = map[string]int32{ - "INVALID": 0, - "PRESENT": 1, - "NULL_VALUE": 2, - "NOT_FOUND": 3, - "OUTSIDE_MAX_AGE": 4, - } -) - -func (x GetOnlineFeaturesResponse_FieldStatus) Enum() *GetOnlineFeaturesResponse_FieldStatus { - p := new(GetOnlineFeaturesResponse_FieldStatus) - *p = x - return p -} - -func (x GetOnlineFeaturesResponse_FieldStatus) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (GetOnlineFeaturesResponse_FieldStatus) Descriptor() protoreflect.EnumDescriptor { - return file_feast_serving_ServingService_proto_enumTypes[1].Descriptor() -} - -func (GetOnlineFeaturesResponse_FieldStatus) Type() protoreflect.EnumType { - return &file_feast_serving_ServingService_proto_enumTypes[1] -} - -func (x GetOnlineFeaturesResponse_FieldStatus) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use GetOnlineFeaturesResponse_FieldStatus.Descriptor instead. -func (GetOnlineFeaturesResponse_FieldStatus) EnumDescriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{4, 0} -} - -type GetFeastServingInfoRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetFeastServingInfoRequest) Reset() { - *x = GetFeastServingInfoRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_serving_ServingService_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetFeastServingInfoRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetFeastServingInfoRequest) ProtoMessage() {} - -func (x *GetFeastServingInfoRequest) ProtoReflect() protoreflect.Message { - mi := &file_feast_serving_ServingService_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetFeastServingInfoRequest.ProtoReflect.Descriptor instead. -func (*GetFeastServingInfoRequest) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{0} -} - -type GetFeastServingInfoResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Feast version of this serving deployment. - Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` - // Type of serving deployment, either ONLINE or BATCH. Different store types support different - // feature retrieval methods. - Type FeastServingType `protobuf:"varint,2,opt,name=type,proto3,enum=feast.serving.FeastServingType" json:"type,omitempty"` - // Note: Batch specific options start from 10. - // Staging location for this serving store, if any. 
-	JobStagingLocation string `protobuf:"bytes,10,opt,name=job_staging_location,json=jobStagingLocation,proto3" json:"job_staging_location,omitempty"`
-}
-
-func (x *GetFeastServingInfoResponse) Reset() {
-	*x = GetFeastServingInfoResponse{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_serving_ServingService_proto_msgTypes[1]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *GetFeastServingInfoResponse) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*GetFeastServingInfoResponse) ProtoMessage() {}
-
-func (x *GetFeastServingInfoResponse) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_serving_ServingService_proto_msgTypes[1]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use GetFeastServingInfoResponse.ProtoReflect.Descriptor instead.
-func (*GetFeastServingInfoResponse) Descriptor() ([]byte, []int) {
-	return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{1}
-}
-
-func (x *GetFeastServingInfoResponse) GetVersion() string {
-	if x != nil {
-		return x.Version
-	}
-	return ""
-}
-
-func (x *GetFeastServingInfoResponse) GetType() FeastServingType {
-	if x != nil {
-		return x.Type
-	}
-	return FeastServingType_FEAST_SERVING_TYPE_INVALID
-}
-
-func (x *GetFeastServingInfoResponse) GetJobStagingLocation() string {
-	if x != nil {
-		return x.JobStagingLocation
-	}
-	return ""
-}
-
-type FeatureReferenceV2 struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Name of the Feature Table to retrieve the feature from.
-	FeatureTable string `protobuf:"bytes,1,opt,name=feature_table,json=featureTable,proto3" json:"feature_table,omitempty"`
-	// Name of the Feature to retrieve.
-	Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
-}
-
-func (x *FeatureReferenceV2) Reset() {
-	*x = FeatureReferenceV2{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_feast_serving_ServingService_proto_msgTypes[2]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *FeatureReferenceV2) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*FeatureReferenceV2) ProtoMessage() {}
-
-func (x *FeatureReferenceV2) ProtoReflect() protoreflect.Message {
-	mi := &file_feast_serving_ServingService_proto_msgTypes[2]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use FeatureReferenceV2.ProtoReflect.Descriptor instead.
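// EDITOR'S NOTE: the following is an illustrative sketch added for this review,
// not part of the deleted generated file above. It shows how FeatureReferenceV2
// and GetOnlineFeaturesRequestV2 (defined just below) fit together. The project,
// feature table, feature name, entity values, and the Value_Int64Val oneof
// wrapper from feast/types/Value.proto are assumptions for the example.
package main

import (
	serving "github.com/feast-dev/feast/sdk/go/protos/feast/serving"
	types "github.com/feast-dev/feast/sdk/go/protos/feast/types"
	timestamp "github.com/golang/protobuf/ptypes/timestamp"
)

func main() {
	req := &serving.GetOnlineFeaturesRequestV2{
		Project: "default",
		// Each feature is addressed by feature table name plus feature name.
		Features: []*serving.FeatureReferenceV2{
			{FeatureTable: "driver_hourly_stats", Name: "conv_rate"},
		},
		// One entity row per entity key; the timestamp is compared against
		// max age to decide staleness.
		EntityRows: []*serving.GetOnlineFeaturesRequestV2_EntityRow{
			{
				Timestamp: &timestamp.Timestamp{Seconds: 1600000000},
				Fields: map[string]*types.Value{
					// Assumed: Value_Int64Val is the generated int64 oneof wrapper.
					"driver_id": {Val: &types.Value_Int64Val{Int64Val: 1001}},
				},
			},
		},
	}
	_ = req // in real code, hand this off to the generated serving client
}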
-func (*FeatureReferenceV2) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{2} -} - -func (x *FeatureReferenceV2) GetFeatureTable() string { - if x != nil { - return x.FeatureTable - } - return "" -} - -func (x *FeatureReferenceV2) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -type GetOnlineFeaturesRequestV2 struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // List of features that are being retrieved - Features []*FeatureReferenceV2 `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` - // List of entity rows, containing entity id and timestamp data. - // Used during retrieval of feature rows and for joining feature - // rows into a final dataset - EntityRows []*GetOnlineFeaturesRequestV2_EntityRow `protobuf:"bytes,2,rep,name=entity_rows,json=entityRows,proto3" json:"entity_rows,omitempty"` - // Optional field to specify project name override. If specified, uses the - // given project for retrieval. Overrides the projects specified in - // Feature References if both are specified. - Project string `protobuf:"bytes,5,opt,name=project,proto3" json:"project,omitempty"` -} - -func (x *GetOnlineFeaturesRequestV2) Reset() { - *x = GetOnlineFeaturesRequestV2{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_serving_ServingService_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetOnlineFeaturesRequestV2) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetOnlineFeaturesRequestV2) ProtoMessage() {} - -func (x *GetOnlineFeaturesRequestV2) ProtoReflect() protoreflect.Message { - mi := &file_feast_serving_ServingService_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetOnlineFeaturesRequestV2.ProtoReflect.Descriptor instead. -func (*GetOnlineFeaturesRequestV2) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{3} -} - -func (x *GetOnlineFeaturesRequestV2) GetFeatures() []*FeatureReferenceV2 { - if x != nil { - return x.Features - } - return nil -} - -func (x *GetOnlineFeaturesRequestV2) GetEntityRows() []*GetOnlineFeaturesRequestV2_EntityRow { - if x != nil { - return x.EntityRows - } - return nil -} - -func (x *GetOnlineFeaturesRequestV2) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -type GetOnlineFeaturesResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Feature values retrieved from feast. 
- FieldValues []*GetOnlineFeaturesResponse_FieldValues `protobuf:"bytes,1,rep,name=field_values,json=fieldValues,proto3" json:"field_values,omitempty"` -} - -func (x *GetOnlineFeaturesResponse) Reset() { - *x = GetOnlineFeaturesResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_serving_ServingService_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetOnlineFeaturesResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetOnlineFeaturesResponse) ProtoMessage() {} - -func (x *GetOnlineFeaturesResponse) ProtoReflect() protoreflect.Message { - mi := &file_feast_serving_ServingService_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetOnlineFeaturesResponse.ProtoReflect.Descriptor instead. -func (*GetOnlineFeaturesResponse) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{4} -} - -func (x *GetOnlineFeaturesResponse) GetFieldValues() []*GetOnlineFeaturesResponse_FieldValues { - if x != nil { - return x.FieldValues - } - return nil -} - -type GetOnlineFeaturesRequestV2_EntityRow struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Request timestamp of this row. This value will be used, - // together with maxAge, to determine feature staleness. - Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` - // Map containing mapping of entity name to entity value. - Fields map[string]*types.Value `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *GetOnlineFeaturesRequestV2_EntityRow) Reset() { - *x = GetOnlineFeaturesRequestV2_EntityRow{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_serving_ServingService_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetOnlineFeaturesRequestV2_EntityRow) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetOnlineFeaturesRequestV2_EntityRow) ProtoMessage() {} - -func (x *GetOnlineFeaturesRequestV2_EntityRow) ProtoReflect() protoreflect.Message { - mi := &file_feast_serving_ServingService_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetOnlineFeaturesRequestV2_EntityRow.ProtoReflect.Descriptor instead. -func (*GetOnlineFeaturesRequestV2_EntityRow) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{3, 0} -} - -func (x *GetOnlineFeaturesRequestV2_EntityRow) GetTimestamp() *timestamp.Timestamp { - if x != nil { - return x.Timestamp - } - return nil -} - -func (x *GetOnlineFeaturesRequestV2_EntityRow) GetFields() map[string]*types.Value { - if x != nil { - return x.Fields - } - return nil -} - -type GetOnlineFeaturesResponse_FieldValues struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Map of feature or entity name to feature/entity values. 
- // Timestamps are not returned in this response. - Fields map[string]*types.Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Map of feature or entity name to feature/entity statuses/metadata. - Statuses map[string]GetOnlineFeaturesResponse_FieldStatus `protobuf:"bytes,2,rep,name=statuses,proto3" json:"statuses,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=feast.serving.GetOnlineFeaturesResponse_FieldStatus"` -} - -func (x *GetOnlineFeaturesResponse_FieldValues) Reset() { - *x = GetOnlineFeaturesResponse_FieldValues{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_serving_ServingService_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetOnlineFeaturesResponse_FieldValues) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetOnlineFeaturesResponse_FieldValues) ProtoMessage() {} - -func (x *GetOnlineFeaturesResponse_FieldValues) ProtoReflect() protoreflect.Message { - mi := &file_feast_serving_ServingService_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetOnlineFeaturesResponse_FieldValues.ProtoReflect.Descriptor instead. -func (*GetOnlineFeaturesResponse_FieldValues) Descriptor() ([]byte, []int) { - return file_feast_serving_ServingService_proto_rawDescGZIP(), []int{4, 0} -} - -func (x *GetOnlineFeaturesResponse_FieldValues) GetFields() map[string]*types.Value { - if x != nil { - return x.Fields - } - return nil -} - -func (x *GetOnlineFeaturesResponse_FieldValues) GetStatuses() map[string]GetOnlineFeaturesResponse_FieldStatus { - if x != nil { - return x.Statuses - } - return nil -} - -var File_feast_serving_ServingService_proto protoreflect.FileDescriptor - -var file_feast_serving_ServingService_proto_rawDesc = []byte{ - 0x0a, 0x22, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2f, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, - 0x69, 0x6e, 0x67, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, - 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2d, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x73, 0x74, 0x61, 0x74, - 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x1c, 0x0a, 0x1a, - 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, - 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x9e, 0x01, 0x0a, 0x1b, 0x47, - 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, 0x6e, - 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x33, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x2e, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x6a, 0x6f, 0x62, - 0x5f, 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x67, - 0x69, 0x6e, 0x67, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x4d, 0x0a, 0x12, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x56, - 0x32, 0x12, 0x23, 0x0a, 0x0d, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x74, 0x61, 0x62, - 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xbb, 0x03, 0x0a, 0x1a, 0x47, - 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x56, 0x32, 0x12, 0x3d, 0x0a, 0x08, 0x66, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x56, 0x32, 0x52, 0x08, - 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x54, 0x0a, 0x0b, 0x65, 0x6e, 0x74, 0x69, - 0x74, 0x79, 0x5f, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, - 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x56, 0x32, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, - 0x6f, 0x77, 0x52, 0x0a, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x18, - 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x1a, 0xed, 0x01, 0x0a, 0x09, 0x45, 0x6e, 0x74, - 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x12, 0x57, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x3f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, - 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x56, 0x32, 0x2e, 0x45, 0x6e, 0x74, 0x69, - 0x74, 0x79, 0x52, 0x6f, 0x77, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x1a, 0x4d, 0x0a, 0x0b, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 
0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdd, 0x04, 0x0a, 0x19, 0x47, 0x65, 0x74, - 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, - 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, - 0x89, 0x03, 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, - 0x58, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x40, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, - 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x12, 0x5e, 0x0a, 0x08, 0x73, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, - 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x08, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x1a, 0x4d, 0x0a, 0x0b, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x71, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x4a, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x34, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, - 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x5b, 0x0a, 0x0b, 0x46, - 0x69, 
0x65, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, - 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x52, 0x45, 0x53, 0x45, - 0x4e, 0x54, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x4e, 0x55, 0x4c, 0x4c, 0x5f, 0x56, 0x41, 0x4c, - 0x55, 0x45, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x54, 0x5f, 0x46, 0x4f, 0x55, 0x4e, - 0x44, 0x10, 0x03, 0x12, 0x13, 0x0a, 0x0f, 0x4f, 0x55, 0x54, 0x53, 0x49, 0x44, 0x45, 0x5f, 0x4d, - 0x41, 0x58, 0x5f, 0x41, 0x47, 0x45, 0x10, 0x04, 0x2a, 0x6f, 0x0a, 0x10, 0x46, 0x65, 0x61, 0x73, - 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x1a, - 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, 0x54, 0x59, - 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x1d, 0x0a, 0x19, - 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, 0x54, 0x59, - 0x50, 0x45, 0x5f, 0x4f, 0x4e, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x01, 0x12, 0x1c, 0x0a, 0x18, 0x46, - 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x42, 0x41, 0x54, 0x43, 0x48, 0x10, 0x02, 0x32, 0xea, 0x01, 0x0a, 0x0e, 0x53, 0x65, - 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6c, 0x0a, 0x13, - 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, - 0x6e, 0x66, 0x6f, 0x12, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, - 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, - 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, - 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, 0x6e, - 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6a, 0x0a, 0x13, 0x47, 0x65, - 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x56, - 0x32, 0x12, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, - 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x56, 0x32, 0x1a, 0x28, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, - 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x5e, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x42, 0x0f, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x41, 0x50, 0x49, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x36, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, - 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_serving_ServingService_proto_rawDescOnce sync.Once - file_feast_serving_ServingService_proto_rawDescData = file_feast_serving_ServingService_proto_rawDesc -) - -func 
file_feast_serving_ServingService_proto_rawDescGZIP() []byte { - file_feast_serving_ServingService_proto_rawDescOnce.Do(func() { - file_feast_serving_ServingService_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_serving_ServingService_proto_rawDescData) - }) - return file_feast_serving_ServingService_proto_rawDescData -} - -var file_feast_serving_ServingService_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_feast_serving_ServingService_proto_msgTypes = make([]protoimpl.MessageInfo, 10) -var file_feast_serving_ServingService_proto_goTypes = []interface{}{ - (FeastServingType)(0), // 0: feast.serving.FeastServingType - (GetOnlineFeaturesResponse_FieldStatus)(0), // 1: feast.serving.GetOnlineFeaturesResponse.FieldStatus - (*GetFeastServingInfoRequest)(nil), // 2: feast.serving.GetFeastServingInfoRequest - (*GetFeastServingInfoResponse)(nil), // 3: feast.serving.GetFeastServingInfoResponse - (*FeatureReferenceV2)(nil), // 4: feast.serving.FeatureReferenceV2 - (*GetOnlineFeaturesRequestV2)(nil), // 5: feast.serving.GetOnlineFeaturesRequestV2 - (*GetOnlineFeaturesResponse)(nil), // 6: feast.serving.GetOnlineFeaturesResponse - (*GetOnlineFeaturesRequestV2_EntityRow)(nil), // 7: feast.serving.GetOnlineFeaturesRequestV2.EntityRow - nil, // 8: feast.serving.GetOnlineFeaturesRequestV2.EntityRow.FieldsEntry - (*GetOnlineFeaturesResponse_FieldValues)(nil), // 9: feast.serving.GetOnlineFeaturesResponse.FieldValues - nil, // 10: feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry - nil, // 11: feast.serving.GetOnlineFeaturesResponse.FieldValues.StatusesEntry - (*timestamp.Timestamp)(nil), // 12: google.protobuf.Timestamp - (*types.Value)(nil), // 13: feast.types.Value -} -var file_feast_serving_ServingService_proto_depIdxs = []int32{ - 0, // 0: feast.serving.GetFeastServingInfoResponse.type:type_name -> feast.serving.FeastServingType - 4, // 1: feast.serving.GetOnlineFeaturesRequestV2.features:type_name -> feast.serving.FeatureReferenceV2 - 7, // 2: feast.serving.GetOnlineFeaturesRequestV2.entity_rows:type_name -> feast.serving.GetOnlineFeaturesRequestV2.EntityRow - 9, // 3: feast.serving.GetOnlineFeaturesResponse.field_values:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues - 12, // 4: feast.serving.GetOnlineFeaturesRequestV2.EntityRow.timestamp:type_name -> google.protobuf.Timestamp - 8, // 5: feast.serving.GetOnlineFeaturesRequestV2.EntityRow.fields:type_name -> feast.serving.GetOnlineFeaturesRequestV2.EntityRow.FieldsEntry - 13, // 6: feast.serving.GetOnlineFeaturesRequestV2.EntityRow.FieldsEntry.value:type_name -> feast.types.Value - 10, // 7: feast.serving.GetOnlineFeaturesResponse.FieldValues.fields:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry - 11, // 8: feast.serving.GetOnlineFeaturesResponse.FieldValues.statuses:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues.StatusesEntry - 13, // 9: feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry.value:type_name -> feast.types.Value - 1, // 10: feast.serving.GetOnlineFeaturesResponse.FieldValues.StatusesEntry.value:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldStatus - 2, // 11: feast.serving.ServingService.GetFeastServingInfo:input_type -> feast.serving.GetFeastServingInfoRequest - 5, // 12: feast.serving.ServingService.GetOnlineFeaturesV2:input_type -> feast.serving.GetOnlineFeaturesRequestV2 - 3, // 13: feast.serving.ServingService.GetFeastServingInfo:output_type -> feast.serving.GetFeastServingInfoResponse - 6, // 14: 
feast.serving.ServingService.GetOnlineFeaturesV2:output_type -> feast.serving.GetOnlineFeaturesResponse - 13, // [13:15] is the sub-list for method output_type - 11, // [11:13] is the sub-list for method input_type - 11, // [11:11] is the sub-list for extension type_name - 11, // [11:11] is the sub-list for extension extendee - 0, // [0:11] is the sub-list for field type_name -} - -func init() { file_feast_serving_ServingService_proto_init() } -func file_feast_serving_ServingService_proto_init() { - if File_feast_serving_ServingService_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_serving_ServingService_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeastServingInfoRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetFeastServingInfoResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureReferenceV2); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetOnlineFeaturesRequestV2); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetOnlineFeaturesResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetOnlineFeaturesRequestV2_EntityRow); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_serving_ServingService_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetOnlineFeaturesResponse_FieldValues); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_serving_ServingService_proto_rawDesc, - NumEnums: 2, - NumMessages: 10, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_feast_serving_ServingService_proto_goTypes, - DependencyIndexes: file_feast_serving_ServingService_proto_depIdxs, - EnumInfos: file_feast_serving_ServingService_proto_enumTypes, - MessageInfos: file_feast_serving_ServingService_proto_msgTypes, - }.Build() - File_feast_serving_ServingService_proto = out.File - file_feast_serving_ServingService_proto_rawDesc = nil - file_feast_serving_ServingService_proto_goTypes = nil - file_feast_serving_ServingService_proto_depIdxs = nil -} - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ context.Context -var _ grpc.ClientConnInterface - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 - -// ServingServiceClient is the client API for ServingService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ServingServiceClient interface { - // Get information about this Feast serving. - GetFeastServingInfo(ctx context.Context, in *GetFeastServingInfoRequest, opts ...grpc.CallOption) (*GetFeastServingInfoResponse, error) - // Get online features (v2) synchronously. - GetOnlineFeaturesV2(ctx context.Context, in *GetOnlineFeaturesRequestV2, opts ...grpc.CallOption) (*GetOnlineFeaturesResponse, error) -} - -type servingServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewServingServiceClient(cc grpc.ClientConnInterface) ServingServiceClient { - return &servingServiceClient{cc} -} - -func (c *servingServiceClient) GetFeastServingInfo(ctx context.Context, in *GetFeastServingInfoRequest, opts ...grpc.CallOption) (*GetFeastServingInfoResponse, error) { - out := new(GetFeastServingInfoResponse) - err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetFeastServingInfo", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *servingServiceClient) GetOnlineFeaturesV2(ctx context.Context, in *GetOnlineFeaturesRequestV2, opts ...grpc.CallOption) (*GetOnlineFeaturesResponse, error) { - out := new(GetOnlineFeaturesResponse) - err := c.cc.Invoke(ctx, "/feast.serving.ServingService/GetOnlineFeaturesV2", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ServingServiceServer is the server API for ServingService service. -type ServingServiceServer interface { - // Get information about this Feast serving. - GetFeastServingInfo(context.Context, *GetFeastServingInfoRequest) (*GetFeastServingInfoResponse, error) - // Get online features (v2) synchronously. - GetOnlineFeaturesV2(context.Context, *GetOnlineFeaturesRequestV2) (*GetOnlineFeaturesResponse, error) -} - -// UnimplementedServingServiceServer can be embedded to have forward compatible implementations. 
-type UnimplementedServingServiceServer struct { -} - -func (*UnimplementedServingServiceServer) GetFeastServingInfo(context.Context, *GetFeastServingInfoRequest) (*GetFeastServingInfoResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetFeastServingInfo not implemented") -} -func (*UnimplementedServingServiceServer) GetOnlineFeaturesV2(context.Context, *GetOnlineFeaturesRequestV2) (*GetOnlineFeaturesResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetOnlineFeaturesV2 not implemented") -} - -func RegisterServingServiceServer(s *grpc.Server, srv ServingServiceServer) { - s.RegisterService(&_ServingService_serviceDesc, srv) -} - -func _ServingService_GetFeastServingInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetFeastServingInfoRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServingServiceServer).GetFeastServingInfo(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.serving.ServingService/GetFeastServingInfo", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServingServiceServer).GetFeastServingInfo(ctx, req.(*GetFeastServingInfoRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ServingService_GetOnlineFeaturesV2_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetOnlineFeaturesRequestV2) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServingServiceServer).GetOnlineFeaturesV2(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/feast.serving.ServingService/GetOnlineFeaturesV2", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServingServiceServer).GetOnlineFeaturesV2(ctx, req.(*GetOnlineFeaturesRequestV2)) - } - return interceptor(ctx, in, info, handler) -} - -var _ServingService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "feast.serving.ServingService", - HandlerType: (*ServingServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetFeastServingInfo", - Handler: _ServingService_GetFeastServingInfo_Handler, - }, - { - MethodName: "GetOnlineFeaturesV2", - Handler: _ServingService_GetOnlineFeaturesV2_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "feast/serving/ServingService.proto", -} diff --git a/sdk/go/protos/feast/storage/Redis.pb.go b/sdk/go/protos/feast/storage/Redis.pb.go deleted file mode 100644 index 761bf2bb6e..0000000000 --- a/sdk/go/protos/feast/storage/Redis.pb.go +++ /dev/null @@ -1,196 +0,0 @@ -// -// Copyright 2019 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/storage/Redis.proto - -package storage - -import ( - types "github.com/feast-dev/feast/sdk/go/protos/feast/types" - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type RedisKeyV2 struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - EntityNames []string `protobuf:"bytes,2,rep,name=entity_names,json=entityNames,proto3" json:"entity_names,omitempty"` - EntityValues []*types.Value `protobuf:"bytes,3,rep,name=entity_values,json=entityValues,proto3" json:"entity_values,omitempty"` -} - -func (x *RedisKeyV2) Reset() { - *x = RedisKeyV2{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_storage_Redis_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RedisKeyV2) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RedisKeyV2) ProtoMessage() {} - -func (x *RedisKeyV2) ProtoReflect() protoreflect.Message { - mi := &file_feast_storage_Redis_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RedisKeyV2.ProtoReflect.Descriptor instead. 
-func (*RedisKeyV2) Descriptor() ([]byte, []int) { - return file_feast_storage_Redis_proto_rawDescGZIP(), []int{0} -} - -func (x *RedisKeyV2) GetProject() string { - if x != nil { - return x.Project - } - return "" -} - -func (x *RedisKeyV2) GetEntityNames() []string { - if x != nil { - return x.EntityNames - } - return nil -} - -func (x *RedisKeyV2) GetEntityValues() []*types.Value { - if x != nil { - return x.EntityValues - } - return nil -} - -var File_feast_storage_Redis_proto protoreflect.FileDescriptor - -var file_feast_storage_Redis_proto_rawDesc = []byte{ - 0x0a, 0x19, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2f, - 0x52, 0x65, 0x64, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x82, 0x01, 0x0a, - 0x0a, 0x52, 0x65, 0x64, 0x69, 0x73, 0x4b, 0x65, 0x79, 0x56, 0x32, 0x12, 0x18, 0x0a, 0x07, 0x70, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x65, 0x6e, 0x74, - 0x69, 0x74, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0d, 0x65, 0x6e, 0x74, 0x69, - 0x74, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x42, 0x59, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x42, 0x0a, 0x52, 0x65, 0x64, 0x69, 0x73, 0x50, - 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_storage_Redis_proto_rawDescOnce sync.Once - file_feast_storage_Redis_proto_rawDescData = file_feast_storage_Redis_proto_rawDesc -) - -func file_feast_storage_Redis_proto_rawDescGZIP() []byte { - file_feast_storage_Redis_proto_rawDescOnce.Do(func() { - file_feast_storage_Redis_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_storage_Redis_proto_rawDescData) - }) - return file_feast_storage_Redis_proto_rawDescData -} - -var file_feast_storage_Redis_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_feast_storage_Redis_proto_goTypes = []interface{}{ - (*RedisKeyV2)(nil), // 0: feast.storage.RedisKeyV2 - (*types.Value)(nil), // 1: feast.types.Value -} -var file_feast_storage_Redis_proto_depIdxs = []int32{ - 1, // 0: feast.storage.RedisKeyV2.entity_values:type_name -> feast.types.Value - 1, // [1:1] is the sub-list for method output_type - 1, // [1:1] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, 
// [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for field type_name -} - -func init() { file_feast_storage_Redis_proto_init() } -func file_feast_storage_Redis_proto_init() { - if File_feast_storage_Redis_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_storage_Redis_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RedisKeyV2); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_storage_Redis_proto_rawDesc, - NumEnums: 0, - NumMessages: 1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_storage_Redis_proto_goTypes, - DependencyIndexes: file_feast_storage_Redis_proto_depIdxs, - MessageInfos: file_feast_storage_Redis_proto_msgTypes, - }.Build() - File_feast_storage_Redis_proto = out.File - file_feast_storage_Redis_proto_rawDesc = nil - file_feast_storage_Redis_proto_goTypes = nil - file_feast_storage_Redis_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/types/FeatureRow.pb.go b/sdk/go/protos/feast/types/FeatureRow.pb.go deleted file mode 100644 index f6fe6bfa42..0000000000 --- a/sdk/go/protos/feast/types/FeatureRow.pb.go +++ /dev/null @@ -1,219 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/types/FeatureRow.proto - -package types - -import ( - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type FeatureRow struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Fields in the feature row. - Fields []*Field `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` - // Timestamp of the feature row. While the actual definition of this timestamp may vary - // depending on the upstream feature creation pipelines, this is the timestamp that Feast - // will use to perform joins, determine latest values, and coalesce rows. 
- EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` - // Complete reference to the featureSet this featureRow belongs to, in the form of - // /. This value will be used by the feast ingestion job to filter - // rows, and write the values to the correct tables. - FeatureSet string `protobuf:"bytes,6,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` - // Identifier tying this feature row to a specific ingestion job. - IngestionId string `protobuf:"bytes,7,opt,name=ingestion_id,json=ingestionId,proto3" json:"ingestion_id,omitempty"` -} - -func (x *FeatureRow) Reset() { - *x = FeatureRow{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_FeatureRow_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureRow) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureRow) ProtoMessage() {} - -func (x *FeatureRow) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_FeatureRow_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureRow.ProtoReflect.Descriptor instead. -func (*FeatureRow) Descriptor() ([]byte, []int) { - return file_feast_types_FeatureRow_proto_rawDescGZIP(), []int{0} -} - -func (x *FeatureRow) GetFields() []*Field { - if x != nil { - return x.Fields - } - return nil -} - -func (x *FeatureRow) GetEventTimestamp() *timestamp.Timestamp { - if x != nil { - return x.EventTimestamp - } - return nil -} - -func (x *FeatureRow) GetFeatureSet() string { - if x != nil { - return x.FeatureSet - } - return "" -} - -func (x *FeatureRow) GetIngestionId() string { - if x != nil { - return x.IngestionId - } - return "" -} - -var File_feast_types_FeatureRow_proto protoreflect.FileDescriptor - -var file_feast_types_FeatureRow_proto_rawDesc = []byte{ - 0x0a, 0x1c, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc1, 0x01, 0x0a, 0x0a, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x52, 0x6f, 0x77, 0x12, 0x2a, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x12, 0x43, 0x0a, 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 
0x72, 0x65, - 0x5f, 0x73, 0x65, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, - 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x42, 0x5a, 0x0a, 0x11, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0f, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, - 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, - 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_types_FeatureRow_proto_rawDescOnce sync.Once - file_feast_types_FeatureRow_proto_rawDescData = file_feast_types_FeatureRow_proto_rawDesc -) - -func file_feast_types_FeatureRow_proto_rawDescGZIP() []byte { - file_feast_types_FeatureRow_proto_rawDescOnce.Do(func() { - file_feast_types_FeatureRow_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_types_FeatureRow_proto_rawDescData) - }) - return file_feast_types_FeatureRow_proto_rawDescData -} - -var file_feast_types_FeatureRow_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_feast_types_FeatureRow_proto_goTypes = []interface{}{ - (*FeatureRow)(nil), // 0: feast.types.FeatureRow - (*Field)(nil), // 1: feast.types.Field - (*timestamp.Timestamp)(nil), // 2: google.protobuf.Timestamp -} -var file_feast_types_FeatureRow_proto_depIdxs = []int32{ - 1, // 0: feast.types.FeatureRow.fields:type_name -> feast.types.Field - 2, // 1: feast.types.FeatureRow.event_timestamp:type_name -> google.protobuf.Timestamp - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_feast_types_FeatureRow_proto_init() } -func file_feast_types_FeatureRow_proto_init() { - if File_feast_types_FeatureRow_proto != nil { - return - } - file_feast_types_Field_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_types_FeatureRow_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureRow); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_types_FeatureRow_proto_rawDesc, - NumEnums: 0, - NumMessages: 1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_types_FeatureRow_proto_goTypes, - DependencyIndexes: file_feast_types_FeatureRow_proto_depIdxs, - MessageInfos: file_feast_types_FeatureRow_proto_msgTypes, - }.Build() - File_feast_types_FeatureRow_proto = out.File - file_feast_types_FeatureRow_proto_rawDesc = nil - file_feast_types_FeatureRow_proto_goTypes = nil - file_feast_types_FeatureRow_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/types/FeatureRowExtended.pb.go b/sdk/go/protos/feast/types/FeatureRowExtended.pb.go deleted file mode 100644 index 
a01f0b0417..0000000000 --- a/sdk/go/protos/feast/types/FeatureRowExtended.pb.go +++ /dev/null @@ -1,370 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.10.0 -// source: feast/types/FeatureRowExtended.proto - -package types - -import ( - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type Error struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Cause string `protobuf:"bytes,1,opt,name=cause,proto3" json:"cause,omitempty"` // exception class name - Transform string `protobuf:"bytes,2,opt,name=transform,proto3" json:"transform,omitempty"` // name of transform where the error occurred - Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` - StackTrace string `protobuf:"bytes,4,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` -} - -func (x *Error) Reset() { - *x = Error{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Error) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Error) ProtoMessage() {} - -func (x *Error) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Error.ProtoReflect.Descriptor instead. 
-func (*Error) Descriptor() ([]byte, []int) { - return file_feast_types_FeatureRowExtended_proto_rawDescGZIP(), []int{0} -} - -func (x *Error) GetCause() string { - if x != nil { - return x.Cause - } - return "" -} - -func (x *Error) GetTransform() string { - if x != nil { - return x.Transform - } - return "" -} - -func (x *Error) GetMessage() string { - if x != nil { - return x.Message - } - return "" -} - -func (x *Error) GetStackTrace() string { - if x != nil { - return x.StackTrace - } - return "" -} - -type Attempt struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Attempts int32 `protobuf:"varint,1,opt,name=attempts,proto3" json:"attempts,omitempty"` - Error *Error `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` -} - -func (x *Attempt) Reset() { - *x = Attempt{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Attempt) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Attempt) ProtoMessage() {} - -func (x *Attempt) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Attempt.ProtoReflect.Descriptor instead. -func (*Attempt) Descriptor() ([]byte, []int) { - return file_feast_types_FeatureRowExtended_proto_rawDescGZIP(), []int{1} -} - -func (x *Attempt) GetAttempts() int32 { - if x != nil { - return x.Attempts - } - return 0 -} - -func (x *Attempt) GetError() *Error { - if x != nil { - return x.Error - } - return nil -} - -type FeatureRowExtended struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Row *FeatureRow `protobuf:"bytes,1,opt,name=row,proto3" json:"row,omitempty"` - LastAttempt *Attempt `protobuf:"bytes,2,opt,name=last_attempt,json=lastAttempt,proto3" json:"last_attempt,omitempty"` - FirstSeen *timestamp.Timestamp `protobuf:"bytes,3,opt,name=first_seen,json=firstSeen,proto3" json:"first_seen,omitempty"` -} - -func (x *FeatureRowExtended) Reset() { - *x = FeatureRowExtended{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureRowExtended) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureRowExtended) ProtoMessage() {} - -func (x *FeatureRowExtended) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_FeatureRowExtended_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureRowExtended.ProtoReflect.Descriptor instead. 
-func (*FeatureRowExtended) Descriptor() ([]byte, []int) { - return file_feast_types_FeatureRowExtended_proto_rawDescGZIP(), []int{2} -} - -func (x *FeatureRowExtended) GetRow() *FeatureRow { - if x != nil { - return x.Row - } - return nil -} - -func (x *FeatureRowExtended) GetLastAttempt() *Attempt { - if x != nil { - return x.LastAttempt - } - return nil -} - -func (x *FeatureRowExtended) GetFirstSeen() *timestamp.Timestamp { - if x != nil { - return x.FirstSeen - } - return nil -} - -var File_feast_types_FeatureRowExtended_proto protoreflect.FileDescriptor - -var file_feast_types_FeatureRowExtended_proto_rawDesc = []byte{ - 0x0a, 0x24, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, - 0x73, 0x2f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0x76, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x63, - 0x61, 0x75, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x63, 0x61, 0x75, 0x73, - 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x12, - 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x61, - 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, - 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x22, 0x4f, 0x0a, 0x07, 0x41, 0x74, - 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, - 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x61, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, - 0x73, 0x12, 0x28, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xb3, 0x01, 0x0a, 0x12, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, - 0x65, 0x64, 0x12, 0x29, 0x0a, 0x03, 0x72, 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x17, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x52, 0x03, 0x72, 0x6f, 0x77, 0x12, 0x37, 0x0a, - 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x61, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x73, 0x2e, 0x41, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x52, 0x0b, 0x6c, 0x61, 0x73, 0x74, 0x41, - 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, - 0x73, 0x65, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 
0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, - 0x6e, 0x42, 0x62, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x17, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, - 0x6f, 0x77, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, - 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, - 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_types_FeatureRowExtended_proto_rawDescOnce sync.Once - file_feast_types_FeatureRowExtended_proto_rawDescData = file_feast_types_FeatureRowExtended_proto_rawDesc -) - -func file_feast_types_FeatureRowExtended_proto_rawDescGZIP() []byte { - file_feast_types_FeatureRowExtended_proto_rawDescOnce.Do(func() { - file_feast_types_FeatureRowExtended_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_types_FeatureRowExtended_proto_rawDescData) - }) - return file_feast_types_FeatureRowExtended_proto_rawDescData -} - -var file_feast_types_FeatureRowExtended_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_feast_types_FeatureRowExtended_proto_goTypes = []interface{}{ - (*Error)(nil), // 0: feast.types.Error - (*Attempt)(nil), // 1: feast.types.Attempt - (*FeatureRowExtended)(nil), // 2: feast.types.FeatureRowExtended - (*FeatureRow)(nil), // 3: feast.types.FeatureRow - (*timestamp.Timestamp)(nil), // 4: google.protobuf.Timestamp -} -var file_feast_types_FeatureRowExtended_proto_depIdxs = []int32{ - 0, // 0: feast.types.Attempt.error:type_name -> feast.types.Error - 3, // 1: feast.types.FeatureRowExtended.row:type_name -> feast.types.FeatureRow - 1, // 2: feast.types.FeatureRowExtended.last_attempt:type_name -> feast.types.Attempt - 4, // 3: feast.types.FeatureRowExtended.first_seen:type_name -> google.protobuf.Timestamp - 4, // [4:4] is the sub-list for method output_type - 4, // [4:4] is the sub-list for method input_type - 4, // [4:4] is the sub-list for extension type_name - 4, // [4:4] is the sub-list for extension extendee - 0, // [0:4] is the sub-list for field type_name -} - -func init() { file_feast_types_FeatureRowExtended_proto_init() } -func file_feast_types_FeatureRowExtended_proto_init() { - if File_feast_types_FeatureRowExtended_proto != nil { - return - } - file_feast_types_FeatureRow_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_types_FeatureRowExtended_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Error); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_FeatureRowExtended_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Attempt); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_FeatureRowExtended_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureRowExtended); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ 
- File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_types_FeatureRowExtended_proto_rawDesc, - NumEnums: 0, - NumMessages: 3, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_types_FeatureRowExtended_proto_goTypes, - DependencyIndexes: file_feast_types_FeatureRowExtended_proto_depIdxs, - MessageInfos: file_feast_types_FeatureRowExtended_proto_msgTypes, - }.Build() - File_feast_types_FeatureRowExtended_proto = out.File - file_feast_types_FeatureRowExtended_proto_rawDesc = nil - file_feast_types_FeatureRowExtended_proto_goTypes = nil - file_feast_types_FeatureRowExtended_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/types/Field.pb.go b/sdk/go/protos/feast/types/Field.pb.go deleted file mode 100644 index 9dad77cdb9..0000000000 --- a/sdk/go/protos/feast/types/Field.pb.go +++ /dev/null @@ -1,182 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/types/Field.proto - -package types - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type Field struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` -} - -func (x *Field) Reset() { - *x = Field{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Field_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Field) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Field) ProtoMessage() {} - -func (x *Field) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Field_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Field.ProtoReflect.Descriptor instead. 
-func (*Field) Descriptor() ([]byte, []int) { - return file_feast_types_Field_proto_rawDescGZIP(), []int{0} -} - -func (x *Field) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *Field) GetValue() *Value { - if x != nil { - return x.Value - } - return nil -} - -var File_feast_types_Field_proto protoreflect.FileDescriptor - -var file_feast_types_Field_proto_rawDesc = []byte{ - 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x69, - 0x65, 0x6c, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0x45, 0x0a, 0x05, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x55, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0a, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_feast_types_Field_proto_rawDescOnce sync.Once - file_feast_types_Field_proto_rawDescData = file_feast_types_Field_proto_rawDesc -) - -func file_feast_types_Field_proto_rawDescGZIP() []byte { - file_feast_types_Field_proto_rawDescOnce.Do(func() { - file_feast_types_Field_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_types_Field_proto_rawDescData) - }) - return file_feast_types_Field_proto_rawDescData -} - -var file_feast_types_Field_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_feast_types_Field_proto_goTypes = []interface{}{ - (*Field)(nil), // 0: feast.types.Field - (*Value)(nil), // 1: feast.types.Value -} -var file_feast_types_Field_proto_depIdxs = []int32{ - 1, // 0: feast.types.Field.value:type_name -> feast.types.Value - 1, // [1:1] is the sub-list for method output_type - 1, // [1:1] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, // [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for field type_name -} - -func init() { file_feast_types_Field_proto_init() } -func file_feast_types_Field_proto_init() { - if File_feast_types_Field_proto != nil { - return - } - file_feast_types_Value_proto_init() - if !protoimpl.UnsafeEnabled { - file_feast_types_Field_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Field); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_types_Field_proto_rawDesc, - NumEnums: 0, - NumMessages: 
1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_types_Field_proto_goTypes, - DependencyIndexes: file_feast_types_Field_proto_depIdxs, - MessageInfos: file_feast_types_Field_proto_msgTypes, - }.Build() - File_feast_types_Field_proto = out.File - file_feast_types_Field_proto_rawDesc = nil - file_feast_types_Field_proto_goTypes = nil - file_feast_types_Field_proto_depIdxs = nil -} diff --git a/sdk/go/protos/feast/types/Value.pb.go b/sdk/go/protos/feast/types/Value.pb.go deleted file mode 100644 index 3b19435633..0000000000 --- a/sdk/go/protos/feast/types/Value.pb.go +++ /dev/null @@ -1,1022 +0,0 @@ -// -// Copyright 2018 The Feast Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: feast/types/Value.proto - -package types - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -type ValueType_Enum int32 - -const ( - ValueType_INVALID ValueType_Enum = 0 - ValueType_BYTES ValueType_Enum = 1 - ValueType_STRING ValueType_Enum = 2 - ValueType_INT32 ValueType_Enum = 3 - ValueType_INT64 ValueType_Enum = 4 - ValueType_DOUBLE ValueType_Enum = 5 - ValueType_FLOAT ValueType_Enum = 6 - ValueType_BOOL ValueType_Enum = 7 - ValueType_BYTES_LIST ValueType_Enum = 11 - ValueType_STRING_LIST ValueType_Enum = 12 - ValueType_INT32_LIST ValueType_Enum = 13 - ValueType_INT64_LIST ValueType_Enum = 14 - ValueType_DOUBLE_LIST ValueType_Enum = 15 - ValueType_FLOAT_LIST ValueType_Enum = 16 - ValueType_BOOL_LIST ValueType_Enum = 17 -) - -// Enum value maps for ValueType_Enum. 
-var ( - ValueType_Enum_name = map[int32]string{ - 0: "INVALID", - 1: "BYTES", - 2: "STRING", - 3: "INT32", - 4: "INT64", - 5: "DOUBLE", - 6: "FLOAT", - 7: "BOOL", - 11: "BYTES_LIST", - 12: "STRING_LIST", - 13: "INT32_LIST", - 14: "INT64_LIST", - 15: "DOUBLE_LIST", - 16: "FLOAT_LIST", - 17: "BOOL_LIST", - } - ValueType_Enum_value = map[string]int32{ - "INVALID": 0, - "BYTES": 1, - "STRING": 2, - "INT32": 3, - "INT64": 4, - "DOUBLE": 5, - "FLOAT": 6, - "BOOL": 7, - "BYTES_LIST": 11, - "STRING_LIST": 12, - "INT32_LIST": 13, - "INT64_LIST": 14, - "DOUBLE_LIST": 15, - "FLOAT_LIST": 16, - "BOOL_LIST": 17, - } -) - -func (x ValueType_Enum) Enum() *ValueType_Enum { - p := new(ValueType_Enum) - *p = x - return p -} - -func (x ValueType_Enum) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (ValueType_Enum) Descriptor() protoreflect.EnumDescriptor { - return file_feast_types_Value_proto_enumTypes[0].Descriptor() -} - -func (ValueType_Enum) Type() protoreflect.EnumType { - return &file_feast_types_Value_proto_enumTypes[0] -} - -func (x ValueType_Enum) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use ValueType_Enum.Descriptor instead. -func (ValueType_Enum) EnumDescriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{0, 0} -} - -type ValueType struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *ValueType) Reset() { - *x = ValueType{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ValueType) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ValueType) ProtoMessage() {} - -func (x *ValueType) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ValueType.ProtoReflect.Descriptor instead. -func (*ValueType) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{0} -} - -type Value struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // ValueType is referenced by the metadata types, FeatureInfo and EntityInfo. - // The enum values do not have to match the oneof val field ids, but they should. 
- // - // Types that are assignable to Val: - // *Value_BytesVal - // *Value_StringVal - // *Value_Int32Val - // *Value_Int64Val - // *Value_DoubleVal - // *Value_FloatVal - // *Value_BoolVal - // *Value_BytesListVal - // *Value_StringListVal - // *Value_Int32ListVal - // *Value_Int64ListVal - // *Value_DoubleListVal - // *Value_FloatListVal - // *Value_BoolListVal - Val isValue_Val `protobuf_oneof:"val"` -} - -func (x *Value) Reset() { - *x = Value{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Value) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Value) ProtoMessage() {} - -func (x *Value) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Value.ProtoReflect.Descriptor instead. -func (*Value) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{1} -} - -func (m *Value) GetVal() isValue_Val { - if m != nil { - return m.Val - } - return nil -} - -func (x *Value) GetBytesVal() []byte { - if x, ok := x.GetVal().(*Value_BytesVal); ok { - return x.BytesVal - } - return nil -} - -func (x *Value) GetStringVal() string { - if x, ok := x.GetVal().(*Value_StringVal); ok { - return x.StringVal - } - return "" -} - -func (x *Value) GetInt32Val() int32 { - if x, ok := x.GetVal().(*Value_Int32Val); ok { - return x.Int32Val - } - return 0 -} - -func (x *Value) GetInt64Val() int64 { - if x, ok := x.GetVal().(*Value_Int64Val); ok { - return x.Int64Val - } - return 0 -} - -func (x *Value) GetDoubleVal() float64 { - if x, ok := x.GetVal().(*Value_DoubleVal); ok { - return x.DoubleVal - } - return 0 -} - -func (x *Value) GetFloatVal() float32 { - if x, ok := x.GetVal().(*Value_FloatVal); ok { - return x.FloatVal - } - return 0 -} - -func (x *Value) GetBoolVal() bool { - if x, ok := x.GetVal().(*Value_BoolVal); ok { - return x.BoolVal - } - return false -} - -func (x *Value) GetBytesListVal() *BytesList { - if x, ok := x.GetVal().(*Value_BytesListVal); ok { - return x.BytesListVal - } - return nil -} - -func (x *Value) GetStringListVal() *StringList { - if x, ok := x.GetVal().(*Value_StringListVal); ok { - return x.StringListVal - } - return nil -} - -func (x *Value) GetInt32ListVal() *Int32List { - if x, ok := x.GetVal().(*Value_Int32ListVal); ok { - return x.Int32ListVal - } - return nil -} - -func (x *Value) GetInt64ListVal() *Int64List { - if x, ok := x.GetVal().(*Value_Int64ListVal); ok { - return x.Int64ListVal - } - return nil -} - -func (x *Value) GetDoubleListVal() *DoubleList { - if x, ok := x.GetVal().(*Value_DoubleListVal); ok { - return x.DoubleListVal - } - return nil -} - -func (x *Value) GetFloatListVal() *FloatList { - if x, ok := x.GetVal().(*Value_FloatListVal); ok { - return x.FloatListVal - } - return nil -} - -func (x *Value) GetBoolListVal() *BoolList { - if x, ok := x.GetVal().(*Value_BoolListVal); ok { - return x.BoolListVal - } - return nil -} - -type isValue_Val interface { - isValue_Val() -} - -type Value_BytesVal struct { - BytesVal []byte `protobuf:"bytes,1,opt,name=bytes_val,json=bytesVal,proto3,oneof"` -} - -type Value_StringVal struct { - StringVal string 
`protobuf:"bytes,2,opt,name=string_val,json=stringVal,proto3,oneof"` -} - -type Value_Int32Val struct { - Int32Val int32 `protobuf:"varint,3,opt,name=int32_val,json=int32Val,proto3,oneof"` -} - -type Value_Int64Val struct { - Int64Val int64 `protobuf:"varint,4,opt,name=int64_val,json=int64Val,proto3,oneof"` -} - -type Value_DoubleVal struct { - DoubleVal float64 `protobuf:"fixed64,5,opt,name=double_val,json=doubleVal,proto3,oneof"` -} - -type Value_FloatVal struct { - FloatVal float32 `protobuf:"fixed32,6,opt,name=float_val,json=floatVal,proto3,oneof"` -} - -type Value_BoolVal struct { - BoolVal bool `protobuf:"varint,7,opt,name=bool_val,json=boolVal,proto3,oneof"` -} - -type Value_BytesListVal struct { - BytesListVal *BytesList `protobuf:"bytes,11,opt,name=bytes_list_val,json=bytesListVal,proto3,oneof"` -} - -type Value_StringListVal struct { - StringListVal *StringList `protobuf:"bytes,12,opt,name=string_list_val,json=stringListVal,proto3,oneof"` -} - -type Value_Int32ListVal struct { - Int32ListVal *Int32List `protobuf:"bytes,13,opt,name=int32_list_val,json=int32ListVal,proto3,oneof"` -} - -type Value_Int64ListVal struct { - Int64ListVal *Int64List `protobuf:"bytes,14,opt,name=int64_list_val,json=int64ListVal,proto3,oneof"` -} - -type Value_DoubleListVal struct { - DoubleListVal *DoubleList `protobuf:"bytes,15,opt,name=double_list_val,json=doubleListVal,proto3,oneof"` -} - -type Value_FloatListVal struct { - FloatListVal *FloatList `protobuf:"bytes,16,opt,name=float_list_val,json=floatListVal,proto3,oneof"` -} - -type Value_BoolListVal struct { - BoolListVal *BoolList `protobuf:"bytes,17,opt,name=bool_list_val,json=boolListVal,proto3,oneof"` -} - -func (*Value_BytesVal) isValue_Val() {} - -func (*Value_StringVal) isValue_Val() {} - -func (*Value_Int32Val) isValue_Val() {} - -func (*Value_Int64Val) isValue_Val() {} - -func (*Value_DoubleVal) isValue_Val() {} - -func (*Value_FloatVal) isValue_Val() {} - -func (*Value_BoolVal) isValue_Val() {} - -func (*Value_BytesListVal) isValue_Val() {} - -func (*Value_StringListVal) isValue_Val() {} - -func (*Value_Int32ListVal) isValue_Val() {} - -func (*Value_Int64ListVal) isValue_Val() {} - -func (*Value_DoubleListVal) isValue_Val() {} - -func (*Value_FloatListVal) isValue_Val() {} - -func (*Value_BoolListVal) isValue_Val() {} - -type BytesList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val [][]byte `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` -} - -func (x *BytesList) Reset() { - *x = BytesList{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *BytesList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*BytesList) ProtoMessage() {} - -func (x *BytesList) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use BytesList.ProtoReflect.Descriptor instead. 
-func (*BytesList) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{2} -} - -func (x *BytesList) GetVal() [][]byte { - if x != nil { - return x.Val - } - return nil -} - -type StringList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []string `protobuf:"bytes,1,rep,name=val,proto3" json:"val,omitempty"` -} - -func (x *StringList) Reset() { - *x = StringList{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StringList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StringList) ProtoMessage() {} - -func (x *StringList) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StringList.ProtoReflect.Descriptor instead. -func (*StringList) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{3} -} - -func (x *StringList) GetVal() []string { - if x != nil { - return x.Val - } - return nil -} - -type Int32List struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []int32 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` -} - -func (x *Int32List) Reset() { - *x = Int32List{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Int32List) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Int32List) ProtoMessage() {} - -func (x *Int32List) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Int32List.ProtoReflect.Descriptor instead. -func (*Int32List) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{4} -} - -func (x *Int32List) GetVal() []int32 { - if x != nil { - return x.Val - } - return nil -} - -type Int64List struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []int64 `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` -} - -func (x *Int64List) Reset() { - *x = Int64List{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Int64List) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Int64List) ProtoMessage() {} - -func (x *Int64List) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Int64List.ProtoReflect.Descriptor instead. 
-func (*Int64List) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{5} -} - -func (x *Int64List) GetVal() []int64 { - if x != nil { - return x.Val - } - return nil -} - -type DoubleList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []float64 `protobuf:"fixed64,1,rep,packed,name=val,proto3" json:"val,omitempty"` -} - -func (x *DoubleList) Reset() { - *x = DoubleList{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DoubleList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DoubleList) ProtoMessage() {} - -func (x *DoubleList) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DoubleList.ProtoReflect.Descriptor instead. -func (*DoubleList) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{6} -} - -func (x *DoubleList) GetVal() []float64 { - if x != nil { - return x.Val - } - return nil -} - -type FloatList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []float32 `protobuf:"fixed32,1,rep,packed,name=val,proto3" json:"val,omitempty"` -} - -func (x *FloatList) Reset() { - *x = FloatList{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FloatList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FloatList) ProtoMessage() {} - -func (x *FloatList) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FloatList.ProtoReflect.Descriptor instead. -func (*FloatList) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{7} -} - -func (x *FloatList) GetVal() []float32 { - if x != nil { - return x.Val - } - return nil -} - -type BoolList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Val []bool `protobuf:"varint,1,rep,packed,name=val,proto3" json:"val,omitempty"` -} - -func (x *BoolList) Reset() { - *x = BoolList{} - if protoimpl.UnsafeEnabled { - mi := &file_feast_types_Value_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *BoolList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*BoolList) ProtoMessage() {} - -func (x *BoolList) ProtoReflect() protoreflect.Message { - mi := &file_feast_types_Value_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use BoolList.ProtoReflect.Descriptor instead. 
-func (*BoolList) Descriptor() ([]byte, []int) { - return file_feast_types_Value_proto_rawDescGZIP(), []int{8} -} - -func (x *BoolList) GetVal() []bool { - if x != nil { - return x.Val - } - return nil -} - -var File_feast_types_Value_proto protoreflect.FileDescriptor - -var file_feast_types_Value_proto_rawDesc = []byte{ - 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x22, 0xe0, 0x01, 0x0a, 0x09, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x54, 0x79, 0x70, 0x65, 0x22, 0xd2, 0x01, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0b, 0x0a, - 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, - 0x54, 0x45, 0x53, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, - 0x02, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, - 0x49, 0x4e, 0x54, 0x36, 0x34, 0x10, 0x04, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x4f, 0x55, 0x42, 0x4c, - 0x45, 0x10, 0x05, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x06, 0x12, 0x08, - 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x07, 0x12, 0x0e, 0x0a, 0x0a, 0x42, 0x59, 0x54, 0x45, - 0x53, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x0b, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x54, 0x52, 0x49, - 0x4e, 0x47, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x0c, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, 0x54, - 0x33, 0x32, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x0d, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, 0x54, - 0x36, 0x34, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x0e, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x4f, 0x55, - 0x42, 0x4c, 0x45, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x0f, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x4c, - 0x4f, 0x41, 0x54, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x10, 0x12, 0x0d, 0x0a, 0x09, 0x42, 0x4f, - 0x4f, 0x4c, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x11, 0x22, 0xac, 0x05, 0x0a, 0x05, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x61, 0x6c, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x62, 0x79, 0x74, 0x65, 0x73, 0x56, - 0x61, 0x6c, 0x12, 0x1f, 0x0a, 0x0a, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x56, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x5f, 0x76, 0x61, 0x6c, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x56, - 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, 0x76, 0x61, 0x6c, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x56, 0x61, - 0x6c, 0x12, 0x1f, 0x0a, 0x0a, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x09, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x56, - 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x09, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x02, 0x48, 0x00, 0x52, 0x08, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, - 0x6c, 0x12, 0x1b, 0x0a, 0x08, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x07, 0x20, - 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x12, 0x3e, - 0x0a, 0x0e, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x76, 0x61, 0x6c, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 
0x74, - 0x79, 0x70, 0x65, 0x73, 0x2e, 0x42, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, - 0x52, 0x0c, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x56, 0x61, 0x6c, 0x12, 0x41, - 0x0a, 0x0f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x76, 0x61, - 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x69, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x69, 0x73, 0x74, 0x56, 0x61, - 0x6c, 0x12, 0x3e, 0x0a, 0x0e, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, - 0x76, 0x61, 0x6c, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x49, 0x6e, 0x74, 0x33, 0x32, 0x4c, 0x69, 0x73, - 0x74, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x6e, 0x74, 0x33, 0x32, 0x4c, 0x69, 0x73, 0x74, 0x56, 0x61, - 0x6c, 0x12, 0x3e, 0x0a, 0x0e, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, - 0x76, 0x61, 0x6c, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x49, 0x6e, 0x74, 0x36, 0x34, 0x4c, 0x69, 0x73, - 0x74, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x6e, 0x74, 0x36, 0x34, 0x4c, 0x69, 0x73, 0x74, 0x56, 0x61, - 0x6c, 0x12, 0x41, 0x0a, 0x0f, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x5f, 0x6c, 0x69, 0x73, 0x74, - 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x4c, - 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x4c, 0x69, 0x73, - 0x74, 0x56, 0x61, 0x6c, 0x12, 0x3e, 0x0a, 0x0e, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x6c, 0x69, - 0x73, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0c, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x4c, 0x69, 0x73, - 0x74, 0x56, 0x61, 0x6c, 0x12, 0x3b, 0x0a, 0x0d, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x6c, 0x69, 0x73, - 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x4c, 0x69, - 0x73, 0x74, 0x48, 0x00, 0x52, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x4c, 0x69, 0x73, 0x74, 0x56, 0x61, - 0x6c, 0x42, 0x05, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1d, 0x0a, 0x09, 0x42, 0x79, 0x74, 0x65, - 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0c, 0x52, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1e, 0x0a, 0x0a, 0x53, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1d, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x33, 0x32, - 0x4c, 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x05, 0x52, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1d, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x36, 0x34, 0x4c, - 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x03, - 0x52, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1e, 0x0a, 0x0a, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x4c, - 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x01, - 0x52, 0x03, 0x76, 
0x61, 0x6c, 0x22, 0x1d, 0x0a, 0x09, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x4c, 0x69, - 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x52, - 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1c, 0x0a, 0x08, 0x42, 0x6f, 0x6f, 0x6c, 0x4c, 0x69, 0x73, 0x74, - 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x03, 0x76, - 0x61, 0x6c, 0x42, 0x55, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0a, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x72, - 0x6f, 0x74, 0x6f, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, -} - -var ( - file_feast_types_Value_proto_rawDescOnce sync.Once - file_feast_types_Value_proto_rawDescData = file_feast_types_Value_proto_rawDesc -) - -func file_feast_types_Value_proto_rawDescGZIP() []byte { - file_feast_types_Value_proto_rawDescOnce.Do(func() { - file_feast_types_Value_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_types_Value_proto_rawDescData) - }) - return file_feast_types_Value_proto_rawDescData -} - -var file_feast_types_Value_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_types_Value_proto_msgTypes = make([]protoimpl.MessageInfo, 9) -var file_feast_types_Value_proto_goTypes = []interface{}{ - (ValueType_Enum)(0), // 0: feast.types.ValueType.Enum - (*ValueType)(nil), // 1: feast.types.ValueType - (*Value)(nil), // 2: feast.types.Value - (*BytesList)(nil), // 3: feast.types.BytesList - (*StringList)(nil), // 4: feast.types.StringList - (*Int32List)(nil), // 5: feast.types.Int32List - (*Int64List)(nil), // 6: feast.types.Int64List - (*DoubleList)(nil), // 7: feast.types.DoubleList - (*FloatList)(nil), // 8: feast.types.FloatList - (*BoolList)(nil), // 9: feast.types.BoolList -} -var file_feast_types_Value_proto_depIdxs = []int32{ - 3, // 0: feast.types.Value.bytes_list_val:type_name -> feast.types.BytesList - 4, // 1: feast.types.Value.string_list_val:type_name -> feast.types.StringList - 5, // 2: feast.types.Value.int32_list_val:type_name -> feast.types.Int32List - 6, // 3: feast.types.Value.int64_list_val:type_name -> feast.types.Int64List - 7, // 4: feast.types.Value.double_list_val:type_name -> feast.types.DoubleList - 8, // 5: feast.types.Value.float_list_val:type_name -> feast.types.FloatList - 9, // 6: feast.types.Value.bool_list_val:type_name -> feast.types.BoolList - 7, // [7:7] is the sub-list for method output_type - 7, // [7:7] is the sub-list for method input_type - 7, // [7:7] is the sub-list for extension type_name - 7, // [7:7] is the sub-list for extension extendee - 0, // [0:7] is the sub-list for field type_name -} - -func init() { file_feast_types_Value_proto_init() } -func file_feast_types_Value_proto_init() { - if File_feast_types_Value_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_feast_types_Value_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValueType); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Value); i { 
- case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BytesList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StringList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Int32List); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Int64List); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DoubleList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FloatList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_feast_types_Value_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BoolList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_feast_types_Value_proto_msgTypes[1].OneofWrappers = []interface{}{ - (*Value_BytesVal)(nil), - (*Value_StringVal)(nil), - (*Value_Int32Val)(nil), - (*Value_Int64Val)(nil), - (*Value_DoubleVal)(nil), - (*Value_FloatVal)(nil), - (*Value_BoolVal)(nil), - (*Value_BytesListVal)(nil), - (*Value_StringListVal)(nil), - (*Value_Int32ListVal)(nil), - (*Value_Int64ListVal)(nil), - (*Value_DoubleListVal)(nil), - (*Value_FloatListVal)(nil), - (*Value_BoolListVal)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_feast_types_Value_proto_rawDesc, - NumEnums: 1, - NumMessages: 9, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_feast_types_Value_proto_goTypes, - DependencyIndexes: file_feast_types_Value_proto_depIdxs, - EnumInfos: file_feast_types_Value_proto_enumTypes, - MessageInfos: file_feast_types_Value_proto_msgTypes, - }.Build() - File_feast_types_Value_proto = out.File - file_feast_types_Value_proto_rawDesc = nil - file_feast_types_Value_proto_goTypes = nil - file_feast_types_Value_proto_depIdxs = nil -} diff --git a/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go b/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go deleted file mode 100644 index 1daa7687f9..0000000000 --- a/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2018 The TensorFlow Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// ============================================================================= - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: tensorflow_metadata/proto/v0/path.proto - -package v0 - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// A path is a more general substitute for the name of a field or feature that -// can be used for flat examples as well as structured data. For example, if -// we had data in a protocol buffer: -// message Person { -// int age = 1; -// optional string gender = 2; -// repeated Person parent = 3; -// } -// Thus, here the path {step:["parent", "age"]} in statistics would refer to the -// age of a parent, and {step:["parent", "parent", "age"]} would refer to the -// age of a grandparent. This allows us to distinguish between the statistics -// of parents' ages and grandparents' ages. In general, repeated messages are -// to be preferred to linked lists of arbitrary length. -// For SequenceExample, if we have a feature list "foo", this is represented -// by {step:["##SEQUENCE##", "foo"]}. -type Path struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Any string is a valid step. - // However, whenever possible have a step be [A-Za-z0-9_]+. - Step []string `protobuf:"bytes,1,rep,name=step" json:"step,omitempty"` -} - -func (x *Path) Reset() { - *x = Path{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_path_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Path) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Path) ProtoMessage() {} - -func (x *Path) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_path_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Path.ProtoReflect.Descriptor instead. 
-func (*Path) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_path_proto_rawDescGZIP(), []int{0} -} - -func (x *Path) GetStep() []string { - if x != nil { - return x.Step - } - return nil -} - -var File_tensorflow_metadata_proto_v0_path_proto protoreflect.FileDescriptor - -var file_tensorflow_metadata_proto_v0_path_proto_rawDesc = []byte{ - 0x0a, 0x27, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x70, - 0x61, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x22, 0x1a, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x74, 0x65, - 0x70, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x42, 0x68, 0x0a, - 0x1a, 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, 0x45, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, - 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, -} - -var ( - file_tensorflow_metadata_proto_v0_path_proto_rawDescOnce sync.Once - file_tensorflow_metadata_proto_v0_path_proto_rawDescData = file_tensorflow_metadata_proto_v0_path_proto_rawDesc -) - -func file_tensorflow_metadata_proto_v0_path_proto_rawDescGZIP() []byte { - file_tensorflow_metadata_proto_v0_path_proto_rawDescOnce.Do(func() { - file_tensorflow_metadata_proto_v0_path_proto_rawDescData = protoimpl.X.CompressGZIP(file_tensorflow_metadata_proto_v0_path_proto_rawDescData) - }) - return file_tensorflow_metadata_proto_v0_path_proto_rawDescData -} - -var file_tensorflow_metadata_proto_v0_path_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_tensorflow_metadata_proto_v0_path_proto_goTypes = []interface{}{ - (*Path)(nil), // 0: tensorflow.metadata.v0.Path -} -var file_tensorflow_metadata_proto_v0_path_proto_depIdxs = []int32{ - 0, // [0:0] is the sub-list for method output_type - 0, // [0:0] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_tensorflow_metadata_proto_v0_path_proto_init() } -func file_tensorflow_metadata_proto_v0_path_proto_init() { - if File_tensorflow_metadata_proto_v0_path_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_tensorflow_metadata_proto_v0_path_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Path); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_tensorflow_metadata_proto_v0_path_proto_rawDesc, - NumEnums: 0, - NumMessages: 1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: 
file_tensorflow_metadata_proto_v0_path_proto_goTypes, - DependencyIndexes: file_tensorflow_metadata_proto_v0_path_proto_depIdxs, - MessageInfos: file_tensorflow_metadata_proto_v0_path_proto_msgTypes, - }.Build() - File_tensorflow_metadata_proto_v0_path_proto = out.File - file_tensorflow_metadata_proto_v0_path_proto_rawDesc = nil - file_tensorflow_metadata_proto_v0_path_proto_goTypes = nil - file_tensorflow_metadata_proto_v0_path_proto_depIdxs = nil -} diff --git a/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go b/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go deleted file mode 100644 index 940779a191..0000000000 --- a/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go +++ /dev/null @@ -1,4083 +0,0 @@ -// Copyright 2017 The TensorFlow Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// ============================================================================= - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: tensorflow_metadata/proto/v0/schema.proto - -package v0 - -import ( - proto "github.com/golang/protobuf/proto" - any "github.com/golang/protobuf/ptypes/any" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// LifecycleStage. Only UNKNOWN_STAGE, BETA, and PRODUCTION features are -// actually validated. -// PLANNED, ALPHA, and DEBUG are treated as DEPRECATED. -type LifecycleStage int32 - -const ( - LifecycleStage_UNKNOWN_STAGE LifecycleStage = 0 // Unknown stage. - LifecycleStage_PLANNED LifecycleStage = 1 // Planned feature, may not be created yet. - LifecycleStage_ALPHA LifecycleStage = 2 // Prototype feature, not used in experiments yet. - LifecycleStage_BETA LifecycleStage = 3 // Used in user-facing experiments. - LifecycleStage_PRODUCTION LifecycleStage = 4 // Used in a significant fraction of user traffic. - LifecycleStage_DEPRECATED LifecycleStage = 5 // No longer supported: do not use in new models. - LifecycleStage_DEBUG_ONLY LifecycleStage = 6 // Only exists for debugging purposes. -) - -// Enum value maps for LifecycleStage. 
-var ( - LifecycleStage_name = map[int32]string{ - 0: "UNKNOWN_STAGE", - 1: "PLANNED", - 2: "ALPHA", - 3: "BETA", - 4: "PRODUCTION", - 5: "DEPRECATED", - 6: "DEBUG_ONLY", - } - LifecycleStage_value = map[string]int32{ - "UNKNOWN_STAGE": 0, - "PLANNED": 1, - "ALPHA": 2, - "BETA": 3, - "PRODUCTION": 4, - "DEPRECATED": 5, - "DEBUG_ONLY": 6, - } -) - -func (x LifecycleStage) Enum() *LifecycleStage { - p := new(LifecycleStage) - *p = x - return p -} - -func (x LifecycleStage) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (LifecycleStage) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[0].Descriptor() -} - -func (LifecycleStage) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[0] -} - -func (x LifecycleStage) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Do not use. -func (x *LifecycleStage) UnmarshalJSON(b []byte) error { - num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) - if err != nil { - return err - } - *x = LifecycleStage(num) - return nil -} - -// Deprecated: Use LifecycleStage.Descriptor instead. -func (LifecycleStage) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{0} -} - -// Describes the physical representation of a feature. -// It may be different than the logical representation, which -// is represented as a Domain. -type FeatureType int32 - -const ( - FeatureType_TYPE_UNKNOWN FeatureType = 0 - FeatureType_BYTES FeatureType = 1 - FeatureType_INT FeatureType = 2 - FeatureType_FLOAT FeatureType = 3 - FeatureType_STRUCT FeatureType = 4 -) - -// Enum value maps for FeatureType. -var ( - FeatureType_name = map[int32]string{ - 0: "TYPE_UNKNOWN", - 1: "BYTES", - 2: "INT", - 3: "FLOAT", - 4: "STRUCT", - } - FeatureType_value = map[string]int32{ - "TYPE_UNKNOWN": 0, - "BYTES": 1, - "INT": 2, - "FLOAT": 3, - "STRUCT": 4, - } -) - -func (x FeatureType) Enum() *FeatureType { - p := new(FeatureType) - *p = x - return p -} - -func (x FeatureType) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (FeatureType) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[1].Descriptor() -} - -func (FeatureType) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[1] -} - -func (x FeatureType) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Do not use. -func (x *FeatureType) UnmarshalJSON(b []byte) error { - num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) - if err != nil { - return err - } - *x = FeatureType(num) - return nil -} - -// Deprecated: Use FeatureType.Descriptor instead. -func (FeatureType) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{1} -} - -type TimeDomain_IntegerTimeFormat int32 - -const ( - TimeDomain_FORMAT_UNKNOWN TimeDomain_IntegerTimeFormat = 0 - TimeDomain_UNIX_DAYS TimeDomain_IntegerTimeFormat = 5 // Number of days since 1970-01-01. 
- TimeDomain_UNIX_SECONDS TimeDomain_IntegerTimeFormat = 1 - TimeDomain_UNIX_MILLISECONDS TimeDomain_IntegerTimeFormat = 2 - TimeDomain_UNIX_MICROSECONDS TimeDomain_IntegerTimeFormat = 3 - TimeDomain_UNIX_NANOSECONDS TimeDomain_IntegerTimeFormat = 4 -) - -// Enum value maps for TimeDomain_IntegerTimeFormat. -var ( - TimeDomain_IntegerTimeFormat_name = map[int32]string{ - 0: "FORMAT_UNKNOWN", - 5: "UNIX_DAYS", - 1: "UNIX_SECONDS", - 2: "UNIX_MILLISECONDS", - 3: "UNIX_MICROSECONDS", - 4: "UNIX_NANOSECONDS", - } - TimeDomain_IntegerTimeFormat_value = map[string]int32{ - "FORMAT_UNKNOWN": 0, - "UNIX_DAYS": 5, - "UNIX_SECONDS": 1, - "UNIX_MILLISECONDS": 2, - "UNIX_MICROSECONDS": 3, - "UNIX_NANOSECONDS": 4, - } -) - -func (x TimeDomain_IntegerTimeFormat) Enum() *TimeDomain_IntegerTimeFormat { - p := new(TimeDomain_IntegerTimeFormat) - *p = x - return p -} - -func (x TimeDomain_IntegerTimeFormat) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (TimeDomain_IntegerTimeFormat) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[2].Descriptor() -} - -func (TimeDomain_IntegerTimeFormat) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[2] -} - -func (x TimeDomain_IntegerTimeFormat) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Do not use. -func (x *TimeDomain_IntegerTimeFormat) UnmarshalJSON(b []byte) error { - num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) - if err != nil { - return err - } - *x = TimeDomain_IntegerTimeFormat(num) - return nil -} - -// Deprecated: Use TimeDomain_IntegerTimeFormat.Descriptor instead. -func (TimeDomain_IntegerTimeFormat) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{19, 0} -} - -type TimeOfDayDomain_IntegerTimeOfDayFormat int32 - -const ( - TimeOfDayDomain_FORMAT_UNKNOWN TimeOfDayDomain_IntegerTimeOfDayFormat = 0 - // Time values, containing hour/minute/second/nanos, encoded into 8-byte - // bit fields following the ZetaSQL convention: - // 6 5 4 3 2 1 - // MSB 3210987654321098765432109876543210987654321098765432109876543210 LSB - // | H || M || S ||---------- nanos -----------| - TimeOfDayDomain_PACKED_64_NANOS TimeOfDayDomain_IntegerTimeOfDayFormat = 1 -) - -// Enum value maps for TimeOfDayDomain_IntegerTimeOfDayFormat. -var ( - TimeOfDayDomain_IntegerTimeOfDayFormat_name = map[int32]string{ - 0: "FORMAT_UNKNOWN", - 1: "PACKED_64_NANOS", - } - TimeOfDayDomain_IntegerTimeOfDayFormat_value = map[string]int32{ - "FORMAT_UNKNOWN": 0, - "PACKED_64_NANOS": 1, - } -) - -func (x TimeOfDayDomain_IntegerTimeOfDayFormat) Enum() *TimeOfDayDomain_IntegerTimeOfDayFormat { - p := new(TimeOfDayDomain_IntegerTimeOfDayFormat) - *p = x - return p -} - -func (x TimeOfDayDomain_IntegerTimeOfDayFormat) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (TimeOfDayDomain_IntegerTimeOfDayFormat) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[3].Descriptor() -} - -func (TimeOfDayDomain_IntegerTimeOfDayFormat) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_schema_proto_enumTypes[3] -} - -func (x TimeOfDayDomain_IntegerTimeOfDayFormat) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Do not use. 
-func (x *TimeOfDayDomain_IntegerTimeOfDayFormat) UnmarshalJSON(b []byte) error { - num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) - if err != nil { - return err - } - *x = TimeOfDayDomain_IntegerTimeOfDayFormat(num) - return nil -} - -// Deprecated: Use TimeOfDayDomain_IntegerTimeOfDayFormat.Descriptor instead. -func (TimeOfDayDomain_IntegerTimeOfDayFormat) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{20, 0} -} - -// -// Message to represent schema information. -// NextID: 14 -type Schema struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Features described in this schema. - Feature []*Feature `protobuf:"bytes,1,rep,name=feature" json:"feature,omitempty"` - // Sparse features described in this schema. - SparseFeature []*SparseFeature `protobuf:"bytes,6,rep,name=sparse_feature,json=sparseFeature" json:"sparse_feature,omitempty"` - // Weighted features described in this schema. - WeightedFeature []*WeightedFeature `protobuf:"bytes,12,rep,name=weighted_feature,json=weightedFeature" json:"weighted_feature,omitempty"` - // declared as top-level features in . - // String domains referenced in the features. - StringDomain []*StringDomain `protobuf:"bytes,4,rep,name=string_domain,json=stringDomain" json:"string_domain,omitempty"` - // top level float domains that can be reused by features - FloatDomain []*FloatDomain `protobuf:"bytes,9,rep,name=float_domain,json=floatDomain" json:"float_domain,omitempty"` - // top level int domains that can be reused by features - IntDomain []*IntDomain `protobuf:"bytes,10,rep,name=int_domain,json=intDomain" json:"int_domain,omitempty"` - // Default environments for each feature. - // An environment represents both a type of location (e.g. a server or phone) - // and a time (e.g. right before model X is run). In the standard scenario, - // 99% of the features should be in the default environments TRAINING, - // SERVING, and the LABEL (or labels) AND WEIGHT is only available at TRAINING - // (not at serving). - // Other possible variations: - // 1. There may be TRAINING_MOBILE, SERVING_MOBILE, TRAINING_SERVICE, - // and SERVING_SERVICE. - // 2. If one is ensembling three models, where the predictions of the first - // three models are available for the ensemble model, there may be - // TRAINING, SERVING_INITIAL, SERVING_ENSEMBLE. - // See FeatureProto::not_in_environment and FeatureProto::in_environment. - DefaultEnvironment []string `protobuf:"bytes,5,rep,name=default_environment,json=defaultEnvironment" json:"default_environment,omitempty"` - // Additional information about the schema as a whole. Features may also - // be annotated individually. - Annotation *Annotation `protobuf:"bytes,8,opt,name=annotation" json:"annotation,omitempty"` - // Dataset-level constraints. This is currently used for specifying - // information about changes in num_examples. - DatasetConstraints *DatasetConstraints `protobuf:"bytes,11,opt,name=dataset_constraints,json=datasetConstraints" json:"dataset_constraints,omitempty"` - // TensorRepresentation groups. The keys are the names of the groups. - // Key "" (empty string) denotes the "default" group, which is what should - // be used when a group name is not provided. - // See the documentation at TensorRepresentationGroup for more info. - // Under development. DO NOT USE. 
- TensorRepresentationGroup map[string]*TensorRepresentationGroup `protobuf:"bytes,13,rep,name=tensor_representation_group,json=tensorRepresentationGroup" json:"tensor_representation_group,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` -} - -func (x *Schema) Reset() { - *x = Schema{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Schema) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Schema) ProtoMessage() {} - -func (x *Schema) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Schema.ProtoReflect.Descriptor instead. -func (*Schema) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{0} -} - -func (x *Schema) GetFeature() []*Feature { - if x != nil { - return x.Feature - } - return nil -} - -func (x *Schema) GetSparseFeature() []*SparseFeature { - if x != nil { - return x.SparseFeature - } - return nil -} - -func (x *Schema) GetWeightedFeature() []*WeightedFeature { - if x != nil { - return x.WeightedFeature - } - return nil -} - -func (x *Schema) GetStringDomain() []*StringDomain { - if x != nil { - return x.StringDomain - } - return nil -} - -func (x *Schema) GetFloatDomain() []*FloatDomain { - if x != nil { - return x.FloatDomain - } - return nil -} - -func (x *Schema) GetIntDomain() []*IntDomain { - if x != nil { - return x.IntDomain - } - return nil -} - -func (x *Schema) GetDefaultEnvironment() []string { - if x != nil { - return x.DefaultEnvironment - } - return nil -} - -func (x *Schema) GetAnnotation() *Annotation { - if x != nil { - return x.Annotation - } - return nil -} - -func (x *Schema) GetDatasetConstraints() *DatasetConstraints { - if x != nil { - return x.DatasetConstraints - } - return nil -} - -func (x *Schema) GetTensorRepresentationGroup() map[string]*TensorRepresentationGroup { - if x != nil { - return x.TensorRepresentationGroup - } - return nil -} - -// Describes schema-level information about a specific feature. -// NextID: 31 -type Feature struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The name of the feature. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // required - // This field is no longer supported. Instead, use: - // lifecycle_stage: DEPRECATED - // TODO(b/111450258): remove this. - // - // Deprecated: Do not use. - Deprecated *bool `protobuf:"varint,2,opt,name=deprecated" json:"deprecated,omitempty"` - // Types that are assignable to PresenceConstraints: - // *Feature_Presence - // *Feature_GroupPresence - PresenceConstraints isFeature_PresenceConstraints `protobuf_oneof:"presence_constraints"` - // The shape of the feature which governs the number of values that appear in - // each example. - // - // Types that are assignable to ShapeType: - // *Feature_Shape - // *Feature_ValueCount - ShapeType isFeature_ShapeType `protobuf_oneof:"shape_type"` - // Physical type of the feature's values. 
- // Note that you can have: - // type: BYTES - // int_domain: { - // min: 0 - // max: 3 - // } - // This would be a field that is syntactically BYTES (i.e. strings), but - // semantically an int, i.e. it would be "0", "1", "2", or "3". - Type *FeatureType `protobuf:"varint,6,opt,name=type,enum=tensorflow.metadata.v0.FeatureType" json:"type,omitempty"` - // Domain for the values of the feature. - // - // Types that are assignable to DomainInfo: - // *Feature_Domain - // *Feature_IntDomain - // *Feature_FloatDomain - // *Feature_StringDomain - // *Feature_BoolDomain - // *Feature_StructDomain - // *Feature_NaturalLanguageDomain - // *Feature_ImageDomain - // *Feature_MidDomain - // *Feature_UrlDomain - // *Feature_TimeDomain - // *Feature_TimeOfDayDomain - DomainInfo isFeature_DomainInfo `protobuf_oneof:"domain_info"` - // Constraints on the distribution of the feature values. - // Currently only supported for StringDomains. - // TODO(b/69473628): Extend functionality to other domain types. - DistributionConstraints *DistributionConstraints `protobuf:"bytes,15,opt,name=distribution_constraints,json=distributionConstraints" json:"distribution_constraints,omitempty"` - // Additional information about the feature for documentation purpose. - Annotation *Annotation `protobuf:"bytes,16,opt,name=annotation" json:"annotation,omitempty"` - // Tests comparing the distribution to the associated serving data. - SkewComparator *FeatureComparator `protobuf:"bytes,18,opt,name=skew_comparator,json=skewComparator" json:"skew_comparator,omitempty"` - // Tests comparing the distribution between two consecutive spans (e.g. days). - DriftComparator *FeatureComparator `protobuf:"bytes,21,opt,name=drift_comparator,json=driftComparator" json:"drift_comparator,omitempty"` - // List of environments this feature is present in. - // Should be disjoint from not_in_environment. - // This feature is in environment "foo" if: - // ("foo" is in in_environment or default_environments) AND - // "foo" is not in not_in_environment. - // See Schema::default_environments. - InEnvironment []string `protobuf:"bytes,20,rep,name=in_environment,json=inEnvironment" json:"in_environment,omitempty"` - // List of environments this feature is not present in. - // Should be disjoint from of in_environment. - // See Schema::default_environments and in_environment. - NotInEnvironment []string `protobuf:"bytes,19,rep,name=not_in_environment,json=notInEnvironment" json:"not_in_environment,omitempty"` - // The lifecycle stage of a feature. It can also apply to its descendants. - // i.e., if a struct is DEPRECATED, its children are implicitly deprecated. 
- LifecycleStage *LifecycleStage `protobuf:"varint,22,opt,name=lifecycle_stage,json=lifecycleStage,enum=tensorflow.metadata.v0.LifecycleStage" json:"lifecycle_stage,omitempty"` -} - -func (x *Feature) Reset() { - *x = Feature{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Feature) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Feature) ProtoMessage() {} - -func (x *Feature) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Feature.ProtoReflect.Descriptor instead. -func (*Feature) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{1} -} - -func (x *Feature) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -// Deprecated: Do not use. -func (x *Feature) GetDeprecated() bool { - if x != nil && x.Deprecated != nil { - return *x.Deprecated - } - return false -} - -func (m *Feature) GetPresenceConstraints() isFeature_PresenceConstraints { - if m != nil { - return m.PresenceConstraints - } - return nil -} - -func (x *Feature) GetPresence() *FeaturePresence { - if x, ok := x.GetPresenceConstraints().(*Feature_Presence); ok { - return x.Presence - } - return nil -} - -func (x *Feature) GetGroupPresence() *FeaturePresenceWithinGroup { - if x, ok := x.GetPresenceConstraints().(*Feature_GroupPresence); ok { - return x.GroupPresence - } - return nil -} - -func (m *Feature) GetShapeType() isFeature_ShapeType { - if m != nil { - return m.ShapeType - } - return nil -} - -func (x *Feature) GetShape() *FixedShape { - if x, ok := x.GetShapeType().(*Feature_Shape); ok { - return x.Shape - } - return nil -} - -func (x *Feature) GetValueCount() *ValueCount { - if x, ok := x.GetShapeType().(*Feature_ValueCount); ok { - return x.ValueCount - } - return nil -} - -func (x *Feature) GetType() FeatureType { - if x != nil && x.Type != nil { - return *x.Type - } - return FeatureType_TYPE_UNKNOWN -} - -func (m *Feature) GetDomainInfo() isFeature_DomainInfo { - if m != nil { - return m.DomainInfo - } - return nil -} - -func (x *Feature) GetDomain() string { - if x, ok := x.GetDomainInfo().(*Feature_Domain); ok { - return x.Domain - } - return "" -} - -func (x *Feature) GetIntDomain() *IntDomain { - if x, ok := x.GetDomainInfo().(*Feature_IntDomain); ok { - return x.IntDomain - } - return nil -} - -func (x *Feature) GetFloatDomain() *FloatDomain { - if x, ok := x.GetDomainInfo().(*Feature_FloatDomain); ok { - return x.FloatDomain - } - return nil -} - -func (x *Feature) GetStringDomain() *StringDomain { - if x, ok := x.GetDomainInfo().(*Feature_StringDomain); ok { - return x.StringDomain - } - return nil -} - -func (x *Feature) GetBoolDomain() *BoolDomain { - if x, ok := x.GetDomainInfo().(*Feature_BoolDomain); ok { - return x.BoolDomain - } - return nil -} - -func (x *Feature) GetStructDomain() *StructDomain { - if x, ok := x.GetDomainInfo().(*Feature_StructDomain); ok { - return x.StructDomain - } - return nil -} - -func (x *Feature) GetNaturalLanguageDomain() *NaturalLanguageDomain { - if x, ok := x.GetDomainInfo().(*Feature_NaturalLanguageDomain); ok { - return 
x.NaturalLanguageDomain - } - return nil -} - -func (x *Feature) GetImageDomain() *ImageDomain { - if x, ok := x.GetDomainInfo().(*Feature_ImageDomain); ok { - return x.ImageDomain - } - return nil -} - -func (x *Feature) GetMidDomain() *MIDDomain { - if x, ok := x.GetDomainInfo().(*Feature_MidDomain); ok { - return x.MidDomain - } - return nil -} - -func (x *Feature) GetUrlDomain() *URLDomain { - if x, ok := x.GetDomainInfo().(*Feature_UrlDomain); ok { - return x.UrlDomain - } - return nil -} - -func (x *Feature) GetTimeDomain() *TimeDomain { - if x, ok := x.GetDomainInfo().(*Feature_TimeDomain); ok { - return x.TimeDomain - } - return nil -} - -func (x *Feature) GetTimeOfDayDomain() *TimeOfDayDomain { - if x, ok := x.GetDomainInfo().(*Feature_TimeOfDayDomain); ok { - return x.TimeOfDayDomain - } - return nil -} - -func (x *Feature) GetDistributionConstraints() *DistributionConstraints { - if x != nil { - return x.DistributionConstraints - } - return nil -} - -func (x *Feature) GetAnnotation() *Annotation { - if x != nil { - return x.Annotation - } - return nil -} - -func (x *Feature) GetSkewComparator() *FeatureComparator { - if x != nil { - return x.SkewComparator - } - return nil -} - -func (x *Feature) GetDriftComparator() *FeatureComparator { - if x != nil { - return x.DriftComparator - } - return nil -} - -func (x *Feature) GetInEnvironment() []string { - if x != nil { - return x.InEnvironment - } - return nil -} - -func (x *Feature) GetNotInEnvironment() []string { - if x != nil { - return x.NotInEnvironment - } - return nil -} - -func (x *Feature) GetLifecycleStage() LifecycleStage { - if x != nil && x.LifecycleStage != nil { - return *x.LifecycleStage - } - return LifecycleStage_UNKNOWN_STAGE -} - -type isFeature_PresenceConstraints interface { - isFeature_PresenceConstraints() -} - -type Feature_Presence struct { - // Constraints on the presence of this feature in the examples. - Presence *FeaturePresence `protobuf:"bytes,14,opt,name=presence,oneof"` -} - -type Feature_GroupPresence struct { - // Only used in the context of a "group" context, e.g., inside a sequence. - GroupPresence *FeaturePresenceWithinGroup `protobuf:"bytes,17,opt,name=group_presence,json=groupPresence,oneof"` -} - -func (*Feature_Presence) isFeature_PresenceConstraints() {} - -func (*Feature_GroupPresence) isFeature_PresenceConstraints() {} - -type isFeature_ShapeType interface { - isFeature_ShapeType() -} - -type Feature_Shape struct { - // The feature has a fixed shape corresponding to a multi-dimensional - // tensor. - Shape *FixedShape `protobuf:"bytes,23,opt,name=shape,oneof"` -} - -type Feature_ValueCount struct { - // The feature doesn't have a well defined shape. All we know are limits on - // the minimum and maximum number of values. - ValueCount *ValueCount `protobuf:"bytes,5,opt,name=value_count,json=valueCount,oneof"` -} - -func (*Feature_Shape) isFeature_ShapeType() {} - -func (*Feature_ValueCount) isFeature_ShapeType() {} - -type isFeature_DomainInfo interface { - isFeature_DomainInfo() -} - -type Feature_Domain struct { - // Reference to a domain defined at the schema level. - Domain string `protobuf:"bytes,7,opt,name=domain,oneof"` -} - -type Feature_IntDomain struct { - // Inline definitions of domains. 
- IntDomain *IntDomain `protobuf:"bytes,9,opt,name=int_domain,json=intDomain,oneof"` -} - -type Feature_FloatDomain struct { - FloatDomain *FloatDomain `protobuf:"bytes,10,opt,name=float_domain,json=floatDomain,oneof"` -} - -type Feature_StringDomain struct { - StringDomain *StringDomain `protobuf:"bytes,11,opt,name=string_domain,json=stringDomain,oneof"` -} - -type Feature_BoolDomain struct { - BoolDomain *BoolDomain `protobuf:"bytes,13,opt,name=bool_domain,json=boolDomain,oneof"` -} - -type Feature_StructDomain struct { - StructDomain *StructDomain `protobuf:"bytes,29,opt,name=struct_domain,json=structDomain,oneof"` -} - -type Feature_NaturalLanguageDomain struct { - // Supported semantic domains. - NaturalLanguageDomain *NaturalLanguageDomain `protobuf:"bytes,24,opt,name=natural_language_domain,json=naturalLanguageDomain,oneof"` -} - -type Feature_ImageDomain struct { - ImageDomain *ImageDomain `protobuf:"bytes,25,opt,name=image_domain,json=imageDomain,oneof"` -} - -type Feature_MidDomain struct { - MidDomain *MIDDomain `protobuf:"bytes,26,opt,name=mid_domain,json=midDomain,oneof"` -} - -type Feature_UrlDomain struct { - UrlDomain *URLDomain `protobuf:"bytes,27,opt,name=url_domain,json=urlDomain,oneof"` -} - -type Feature_TimeDomain struct { - TimeDomain *TimeDomain `protobuf:"bytes,28,opt,name=time_domain,json=timeDomain,oneof"` -} - -type Feature_TimeOfDayDomain struct { - TimeOfDayDomain *TimeOfDayDomain `protobuf:"bytes,30,opt,name=time_of_day_domain,json=timeOfDayDomain,oneof"` -} - -func (*Feature_Domain) isFeature_DomainInfo() {} - -func (*Feature_IntDomain) isFeature_DomainInfo() {} - -func (*Feature_FloatDomain) isFeature_DomainInfo() {} - -func (*Feature_StringDomain) isFeature_DomainInfo() {} - -func (*Feature_BoolDomain) isFeature_DomainInfo() {} - -func (*Feature_StructDomain) isFeature_DomainInfo() {} - -func (*Feature_NaturalLanguageDomain) isFeature_DomainInfo() {} - -func (*Feature_ImageDomain) isFeature_DomainInfo() {} - -func (*Feature_MidDomain) isFeature_DomainInfo() {} - -func (*Feature_UrlDomain) isFeature_DomainInfo() {} - -func (*Feature_TimeDomain) isFeature_DomainInfo() {} - -func (*Feature_TimeOfDayDomain) isFeature_DomainInfo() {} - -// Additional information about the schema or about a feature. -type Annotation struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Tags can be used to mark features. For example, tag on user_age feature can - // be `user_feature`, tag on user_country feature can be `location_feature`, - // `user_feature`. - Tag []string `protobuf:"bytes,1,rep,name=tag" json:"tag,omitempty"` - // Free-text comments. This can be used as a description of the feature, - // developer notes etc. - Comment []string `protobuf:"bytes,2,rep,name=comment" json:"comment,omitempty"` - // Application-specific metadata may be attached here. 
- ExtraMetadata []*any.Any `protobuf:"bytes,3,rep,name=extra_metadata,json=extraMetadata" json:"extra_metadata,omitempty"` -} - -func (x *Annotation) Reset() { - *x = Annotation{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Annotation) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Annotation) ProtoMessage() {} - -func (x *Annotation) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Annotation.ProtoReflect.Descriptor instead. -func (*Annotation) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{2} -} - -func (x *Annotation) GetTag() []string { - if x != nil { - return x.Tag - } - return nil -} - -func (x *Annotation) GetComment() []string { - if x != nil { - return x.Comment - } - return nil -} - -func (x *Annotation) GetExtraMetadata() []*any.Any { - if x != nil { - return x.ExtraMetadata - } - return nil -} - -// Checks that the ratio of the current value to the previous value is not below -// the min_fraction_threshold or above the max_fraction_threshold. That is, -// previous value * min_fraction_threshold <= current value <= -// previous value * max_fraction_threshold. -// To specify that the value cannot change, set both min_fraction_threshold and -// max_fraction_threshold to 1.0. -type NumericValueComparator struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - MinFractionThreshold *float64 `protobuf:"fixed64,1,opt,name=min_fraction_threshold,json=minFractionThreshold" json:"min_fraction_threshold,omitempty"` - MaxFractionThreshold *float64 `protobuf:"fixed64,2,opt,name=max_fraction_threshold,json=maxFractionThreshold" json:"max_fraction_threshold,omitempty"` -} - -func (x *NumericValueComparator) Reset() { - *x = NumericValueComparator{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *NumericValueComparator) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*NumericValueComparator) ProtoMessage() {} - -func (x *NumericValueComparator) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use NumericValueComparator.ProtoReflect.Descriptor instead. 
-func (*NumericValueComparator) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{3}
-}
-
-func (x *NumericValueComparator) GetMinFractionThreshold() float64 {
-	if x != nil && x.MinFractionThreshold != nil {
-		return *x.MinFractionThreshold
-	}
-	return 0
-}
-
-func (x *NumericValueComparator) GetMaxFractionThreshold() float64 {
-	if x != nil && x.MaxFractionThreshold != nil {
-		return *x.MaxFractionThreshold
-	}
-	return 0
-}
-
-// Constraints on the entire dataset.
-type DatasetConstraints struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Tests differences in number of examples between the current data and the
-	// previous span.
-	NumExamplesDriftComparator *NumericValueComparator `protobuf:"bytes,1,opt,name=num_examples_drift_comparator,json=numExamplesDriftComparator" json:"num_examples_drift_comparator,omitempty"`
-	// Tests comparisons in number of examples between the current data and the
-	// previous version of that data.
-	NumExamplesVersionComparator *NumericValueComparator `protobuf:"bytes,2,opt,name=num_examples_version_comparator,json=numExamplesVersionComparator" json:"num_examples_version_comparator,omitempty"`
-	// Minimum number of examples in the dataset.
-	MinExamplesCount *int64 `protobuf:"varint,3,opt,name=min_examples_count,json=minExamplesCount" json:"min_examples_count,omitempty"`
-}
-
-func (x *DatasetConstraints) Reset() {
-	*x = DatasetConstraints{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[4]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *DatasetConstraints) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*DatasetConstraints) ProtoMessage() {}
-
-func (x *DatasetConstraints) ProtoReflect() protoreflect.Message {
-	mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[4]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use DatasetConstraints.ProtoReflect.Descriptor instead.
-func (*DatasetConstraints) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{4}
-}
-
-func (x *DatasetConstraints) GetNumExamplesDriftComparator() *NumericValueComparator {
-	if x != nil {
-		return x.NumExamplesDriftComparator
-	}
-	return nil
-}
-
-func (x *DatasetConstraints) GetNumExamplesVersionComparator() *NumericValueComparator {
-	if x != nil {
-		return x.NumExamplesVersionComparator
-	}
-	return nil
-}
-
-func (x *DatasetConstraints) GetMinExamplesCount() int64 {
-	if x != nil && x.MinExamplesCount != nil {
-		return *x.MinExamplesCount
-	}
-	return 0
-}
-
-// Specifies a fixed shape for the feature's values. The immediate implication
-// is that each feature has a fixed number of values. Moreover, these values
-// can be parsed into a multi-dimensional tensor using the specified axis sizes.
-// The FixedShape defines a lexicographical ordering of the data.
For instance, -// if there is a FixedShape { -// dim {size:3} dim {size:2} -// } -// then tensor[0][0]=field[0] -// then tensor[0][1]=field[1] -// then tensor[1][0]=field[2] -// then tensor[1][1]=field[3] -// then tensor[2][0]=field[4] -// then tensor[2][1]=field[5] -// -// The FixedShape message is identical with the TensorFlow TensorShape proto -// message. -type FixedShape struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The dimensions that define the shape. The total number of values in each - // example is the product of sizes of each dimension. - Dim []*FixedShape_Dim `protobuf:"bytes,2,rep,name=dim" json:"dim,omitempty"` -} - -func (x *FixedShape) Reset() { - *x = FixedShape{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FixedShape) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FixedShape) ProtoMessage() {} - -func (x *FixedShape) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FixedShape.ProtoReflect.Descriptor instead. -func (*FixedShape) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{5} -} - -func (x *FixedShape) GetDim() []*FixedShape_Dim { - if x != nil { - return x.Dim - } - return nil -} - -// Limits on maximum and minimum number of values in a -// single example (when the feature is present). Use this when the minimum -// value count can be different than the maximum value count. Otherwise prefer -// FixedShape. -type ValueCount struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Min *int64 `protobuf:"varint,1,opt,name=min" json:"min,omitempty"` - Max *int64 `protobuf:"varint,2,opt,name=max" json:"max,omitempty"` -} - -func (x *ValueCount) Reset() { - *x = ValueCount{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ValueCount) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ValueCount) ProtoMessage() {} - -func (x *ValueCount) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ValueCount.ProtoReflect.Descriptor instead. -func (*ValueCount) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{6} -} - -func (x *ValueCount) GetMin() int64 { - if x != nil && x.Min != nil { - return *x.Min - } - return 0 -} - -func (x *ValueCount) GetMax() int64 { - if x != nil && x.Max != nil { - return *x.Max - } - return 0 -} - -// Represents a weighted feature that is encoded as a combination of raw base -// features. The `weight_feature` should be a float feature with identical -// shape as the `feature`. 
This is useful for representing weights associated -// with categorical tokens (e.g. a TFIDF weight associated with each token). -// TODO(b/142122960): Handle WeightedCategorical end to end in TFX (validation, -// TFX Unit Testing, etc) -type WeightedFeature struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name for the weighted feature. This should not clash with other features in - // the same schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // required - // Path of a base feature to be weighted. Required. - Feature *Path `protobuf:"bytes,2,opt,name=feature" json:"feature,omitempty"` - // Path of weight feature to associate with the base feature. Must be same - // shape as feature. Required. - WeightFeature *Path `protobuf:"bytes,3,opt,name=weight_feature,json=weightFeature" json:"weight_feature,omitempty"` - // The lifecycle_stage determines where a feature is expected to be used, - // and therefore how important issues with it are. - LifecycleStage *LifecycleStage `protobuf:"varint,4,opt,name=lifecycle_stage,json=lifecycleStage,enum=tensorflow.metadata.v0.LifecycleStage" json:"lifecycle_stage,omitempty"` -} - -func (x *WeightedFeature) Reset() { - *x = WeightedFeature{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *WeightedFeature) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*WeightedFeature) ProtoMessage() {} - -func (x *WeightedFeature) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use WeightedFeature.ProtoReflect.Descriptor instead. -func (*WeightedFeature) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{7} -} - -func (x *WeightedFeature) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -func (x *WeightedFeature) GetFeature() *Path { - if x != nil { - return x.Feature - } - return nil -} - -func (x *WeightedFeature) GetWeightFeature() *Path { - if x != nil { - return x.WeightFeature - } - return nil -} - -func (x *WeightedFeature) GetLifecycleStage() LifecycleStage { - if x != nil && x.LifecycleStage != nil { - return *x.LifecycleStage - } - return LifecycleStage_UNKNOWN_STAGE -} - -// A sparse feature represents a sparse tensor that is encoded with a -// combination of raw features, namely index features and a value feature. Each -// index feature defines a list of indices in a different dimension. -type SparseFeature struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name for the sparse feature. This should not clash with other features in - // the same schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // required - // This field is no longer supported. Instead, use: - // lifecycle_stage: DEPRECATED - // TODO(b/111450258): remove this. - // - // Deprecated: Do not use. 
- Deprecated *bool `protobuf:"varint,2,opt,name=deprecated" json:"deprecated,omitempty"` - // The lifecycle_stage determines where a feature is expected to be used, - // and therefore how important issues with it are. - LifecycleStage *LifecycleStage `protobuf:"varint,7,opt,name=lifecycle_stage,json=lifecycleStage,enum=tensorflow.metadata.v0.LifecycleStage" json:"lifecycle_stage,omitempty"` - // Constraints on the presence of this feature in examples. - // Deprecated, this is inferred by the referred features. - // - // Deprecated: Do not use. - Presence *FeaturePresence `protobuf:"bytes,4,opt,name=presence" json:"presence,omitempty"` - // Shape of the sparse tensor that this SparseFeature represents. - // Currently not supported. - // TODO(b/109669962): Consider deriving this from the referred features. - DenseShape *FixedShape `protobuf:"bytes,5,opt,name=dense_shape,json=denseShape" json:"dense_shape,omitempty"` - // Features that represent indexes. Should be integers >= 0. - IndexFeature []*SparseFeature_IndexFeature `protobuf:"bytes,6,rep,name=index_feature,json=indexFeature" json:"index_feature,omitempty"` // at least one - // If true then the index values are already sorted lexicographically. - IsSorted *bool `protobuf:"varint,8,opt,name=is_sorted,json=isSorted" json:"is_sorted,omitempty"` - ValueFeature *SparseFeature_ValueFeature `protobuf:"bytes,9,opt,name=value_feature,json=valueFeature" json:"value_feature,omitempty"` // required - // Type of value feature. - // Deprecated, this is inferred by the referred features. - // - // Deprecated: Do not use. - Type *FeatureType `protobuf:"varint,10,opt,name=type,enum=tensorflow.metadata.v0.FeatureType" json:"type,omitempty"` -} - -func (x *SparseFeature) Reset() { - *x = SparseFeature{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SparseFeature) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SparseFeature) ProtoMessage() {} - -func (x *SparseFeature) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SparseFeature.ProtoReflect.Descriptor instead. -func (*SparseFeature) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{8} -} - -func (x *SparseFeature) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -// Deprecated: Do not use. -func (x *SparseFeature) GetDeprecated() bool { - if x != nil && x.Deprecated != nil { - return *x.Deprecated - } - return false -} - -func (x *SparseFeature) GetLifecycleStage() LifecycleStage { - if x != nil && x.LifecycleStage != nil { - return *x.LifecycleStage - } - return LifecycleStage_UNKNOWN_STAGE -} - -// Deprecated: Do not use. 
-func (x *SparseFeature) GetPresence() *FeaturePresence { - if x != nil { - return x.Presence - } - return nil -} - -func (x *SparseFeature) GetDenseShape() *FixedShape { - if x != nil { - return x.DenseShape - } - return nil -} - -func (x *SparseFeature) GetIndexFeature() []*SparseFeature_IndexFeature { - if x != nil { - return x.IndexFeature - } - return nil -} - -func (x *SparseFeature) GetIsSorted() bool { - if x != nil && x.IsSorted != nil { - return *x.IsSorted - } - return false -} - -func (x *SparseFeature) GetValueFeature() *SparseFeature_ValueFeature { - if x != nil { - return x.ValueFeature - } - return nil -} - -// Deprecated: Do not use. -func (x *SparseFeature) GetType() FeatureType { - if x != nil && x.Type != nil { - return *x.Type - } - return FeatureType_TYPE_UNKNOWN -} - -// Models constraints on the distribution of a feature's values. -// TODO(martinz): replace min_domain_mass with max_off_domain (but slowly). -type DistributionConstraints struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The minimum fraction (in [0,1]) of values across all examples that - // should come from the feature's domain, e.g.: - // 1.0 => All values must come from the domain. - // .9 => At least 90% of the values must come from the domain. - MinDomainMass *float64 `protobuf:"fixed64,1,opt,name=min_domain_mass,json=minDomainMass,def=1" json:"min_domain_mass,omitempty"` -} - -// Default values for DistributionConstraints fields. -const ( - Default_DistributionConstraints_MinDomainMass = float64(1) -) - -func (x *DistributionConstraints) Reset() { - *x = DistributionConstraints{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DistributionConstraints) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DistributionConstraints) ProtoMessage() {} - -func (x *DistributionConstraints) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DistributionConstraints.ProtoReflect.Descriptor instead. -func (*DistributionConstraints) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{9} -} - -func (x *DistributionConstraints) GetMinDomainMass() float64 { - if x != nil && x.MinDomainMass != nil { - return *x.MinDomainMass - } - return Default_DistributionConstraints_MinDomainMass -} - -// Encodes information for domains of integer values. -// Note that FeatureType could be either INT or BYTES. -type IntDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Id of the domain. Required if the domain is defined at the schema level. If - // so, then the name must be unique within the schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - // Min and max values for the domain. - Min *int64 `protobuf:"varint,3,opt,name=min" json:"min,omitempty"` - Max *int64 `protobuf:"varint,4,opt,name=max" json:"max,omitempty"` - // If true then the domain encodes categorical values (i.e., ids) rather than - // ordinal values. 
- IsCategorical *bool `protobuf:"varint,5,opt,name=is_categorical,json=isCategorical" json:"is_categorical,omitempty"` -} - -func (x *IntDomain) Reset() { - *x = IntDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *IntDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*IntDomain) ProtoMessage() {} - -func (x *IntDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use IntDomain.ProtoReflect.Descriptor instead. -func (*IntDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{10} -} - -func (x *IntDomain) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -func (x *IntDomain) GetMin() int64 { - if x != nil && x.Min != nil { - return *x.Min - } - return 0 -} - -func (x *IntDomain) GetMax() int64 { - if x != nil && x.Max != nil { - return *x.Max - } - return 0 -} - -func (x *IntDomain) GetIsCategorical() bool { - if x != nil && x.IsCategorical != nil { - return *x.IsCategorical - } - return false -} - -// Encodes information for domains of float values. -// Note that FeatureType could be either INT or BYTES. -type FloatDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Id of the domain. Required if the domain is defined at the schema level. If - // so, then the name must be unique within the schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - // Min and max values of the domain. - Min *float32 `protobuf:"fixed32,3,opt,name=min" json:"min,omitempty"` - Max *float32 `protobuf:"fixed32,4,opt,name=max" json:"max,omitempty"` -} - -func (x *FloatDomain) Reset() { - *x = FloatDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FloatDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FloatDomain) ProtoMessage() {} - -func (x *FloatDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FloatDomain.ProtoReflect.Descriptor instead. -func (*FloatDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{11} -} - -func (x *FloatDomain) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -func (x *FloatDomain) GetMin() float32 { - if x != nil && x.Min != nil { - return *x.Min - } - return 0 -} - -func (x *FloatDomain) GetMax() float32 { - if x != nil && x.Max != nil { - return *x.Max - } - return 0 -} - -// Domain for a recursive struct. 
-// NOTE: If a feature with a StructDomain is deprecated, then all the -// child features (features and sparse_features of the StructDomain) are also -// considered to be deprecated. Similarly child features can only be in -// environments of the parent feature. -type StructDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Feature []*Feature `protobuf:"bytes,1,rep,name=feature" json:"feature,omitempty"` - SparseFeature []*SparseFeature `protobuf:"bytes,2,rep,name=sparse_feature,json=sparseFeature" json:"sparse_feature,omitempty"` -} - -func (x *StructDomain) Reset() { - *x = StructDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StructDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StructDomain) ProtoMessage() {} - -func (x *StructDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StructDomain.ProtoReflect.Descriptor instead. -func (*StructDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{12} -} - -func (x *StructDomain) GetFeature() []*Feature { - if x != nil { - return x.Feature - } - return nil -} - -func (x *StructDomain) GetSparseFeature() []*SparseFeature { - if x != nil { - return x.SparseFeature - } - return nil -} - -// Encodes information for domains of string values. -type StringDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Id of the domain. Required if the domain is defined at the schema level. If - // so, then the name must be unique within the schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - // The values appearing in the domain. - Value []string `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` -} - -func (x *StringDomain) Reset() { - *x = StringDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StringDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StringDomain) ProtoMessage() {} - -func (x *StringDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StringDomain.ProtoReflect.Descriptor instead. 
-func (*StringDomain) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{13}
-}
-
-func (x *StringDomain) GetName() string {
-	if x != nil && x.Name != nil {
-		return *x.Name
-	}
-	return ""
-}
-
-func (x *StringDomain) GetValue() []string {
-	if x != nil {
-		return x.Value
-	}
-	return nil
-}
-
-// Encodes information about the domain of a boolean attribute that encodes its
-// TRUE/FALSE values as strings, or 0=false, 1=true.
-// Note that FeatureType could be either INT or BYTES.
-type BoolDomain struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Id of the domain. Required if the domain is defined at the schema level. If
-	// so, then the name must be unique within the schema.
-	Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
-	// String values for TRUE/FALSE.
-	TrueValue  *string `protobuf:"bytes,2,opt,name=true_value,json=trueValue" json:"true_value,omitempty"`
-	FalseValue *string `protobuf:"bytes,3,opt,name=false_value,json=falseValue" json:"false_value,omitempty"`
-}
-
-func (x *BoolDomain) Reset() {
-	*x = BoolDomain{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[14]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *BoolDomain) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*BoolDomain) ProtoMessage() {}
-
-func (x *BoolDomain) ProtoReflect() protoreflect.Message {
-	mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[14]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use BoolDomain.ProtoReflect.Descriptor instead.
-func (*BoolDomain) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{14}
-}
-
-func (x *BoolDomain) GetName() string {
-	if x != nil && x.Name != nil {
-		return *x.Name
-	}
-	return ""
-}
-
-func (x *BoolDomain) GetTrueValue() string {
-	if x != nil && x.TrueValue != nil {
-		return *x.TrueValue
-	}
-	return ""
-}
-
-func (x *BoolDomain) GetFalseValue() string {
-	if x != nil && x.FalseValue != nil {
-		return *x.FalseValue
-	}
-	return ""
-}
-
-// Natural language text.
-type NaturalLanguageDomain struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-}
-
-func (x *NaturalLanguageDomain) Reset() {
-	*x = NaturalLanguageDomain{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[15]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *NaturalLanguageDomain) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*NaturalLanguageDomain) ProtoMessage() {}
-
-func (x *NaturalLanguageDomain) ProtoReflect() protoreflect.Message {
-	mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[15]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use NaturalLanguageDomain.ProtoReflect.Descriptor instead.
-func (*NaturalLanguageDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{15} -} - -// Image data. -type ImageDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *ImageDomain) Reset() { - *x = ImageDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ImageDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ImageDomain) ProtoMessage() {} - -func (x *ImageDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ImageDomain.ProtoReflect.Descriptor instead. -func (*ImageDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{16} -} - -// Knowledge graph ID, see: https://www.wikidata.org/wiki/Property:P646 -type MIDDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *MIDDomain) Reset() { - *x = MIDDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *MIDDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*MIDDomain) ProtoMessage() {} - -func (x *MIDDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use MIDDomain.ProtoReflect.Descriptor instead. -func (*MIDDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{17} -} - -// A URL, see: https://en.wikipedia.org/wiki/URL -type URLDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *URLDomain) Reset() { - *x = URLDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *URLDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*URLDomain) ProtoMessage() {} - -func (x *URLDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use URLDomain.ProtoReflect.Descriptor instead. -func (*URLDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{18} -} - -// Time or date representation. 
-type TimeDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Format: - // *TimeDomain_StringFormat - // *TimeDomain_IntegerFormat - Format isTimeDomain_Format `protobuf_oneof:"format"` -} - -func (x *TimeDomain) Reset() { - *x = TimeDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TimeDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TimeDomain) ProtoMessage() {} - -func (x *TimeDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TimeDomain.ProtoReflect.Descriptor instead. -func (*TimeDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{19} -} - -func (m *TimeDomain) GetFormat() isTimeDomain_Format { - if m != nil { - return m.Format - } - return nil -} - -func (x *TimeDomain) GetStringFormat() string { - if x, ok := x.GetFormat().(*TimeDomain_StringFormat); ok { - return x.StringFormat - } - return "" -} - -func (x *TimeDomain) GetIntegerFormat() TimeDomain_IntegerTimeFormat { - if x, ok := x.GetFormat().(*TimeDomain_IntegerFormat); ok { - return x.IntegerFormat - } - return TimeDomain_FORMAT_UNKNOWN -} - -type isTimeDomain_Format interface { - isTimeDomain_Format() -} - -type TimeDomain_StringFormat struct { - // Expected format that contains a combination of regular characters and - // special format specifiers. Format specifiers are a subset of the - // strptime standard. - StringFormat string `protobuf:"bytes,1,opt,name=string_format,json=stringFormat,oneof"` -} - -type TimeDomain_IntegerFormat struct { - // Expected format of integer times. - IntegerFormat TimeDomain_IntegerTimeFormat `protobuf:"varint,2,opt,name=integer_format,json=integerFormat,enum=tensorflow.metadata.v0.TimeDomain_IntegerTimeFormat,oneof"` -} - -func (*TimeDomain_StringFormat) isTimeDomain_Format() {} - -func (*TimeDomain_IntegerFormat) isTimeDomain_Format() {} - -// Time of day, without a particular date. 
-type TimeOfDayDomain struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Format: - // *TimeOfDayDomain_StringFormat - // *TimeOfDayDomain_IntegerFormat - Format isTimeOfDayDomain_Format `protobuf_oneof:"format"` -} - -func (x *TimeOfDayDomain) Reset() { - *x = TimeOfDayDomain{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TimeOfDayDomain) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TimeOfDayDomain) ProtoMessage() {} - -func (x *TimeOfDayDomain) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TimeOfDayDomain.ProtoReflect.Descriptor instead. -func (*TimeOfDayDomain) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{20} -} - -func (m *TimeOfDayDomain) GetFormat() isTimeOfDayDomain_Format { - if m != nil { - return m.Format - } - return nil -} - -func (x *TimeOfDayDomain) GetStringFormat() string { - if x, ok := x.GetFormat().(*TimeOfDayDomain_StringFormat); ok { - return x.StringFormat - } - return "" -} - -func (x *TimeOfDayDomain) GetIntegerFormat() TimeOfDayDomain_IntegerTimeOfDayFormat { - if x, ok := x.GetFormat().(*TimeOfDayDomain_IntegerFormat); ok { - return x.IntegerFormat - } - return TimeOfDayDomain_FORMAT_UNKNOWN -} - -type isTimeOfDayDomain_Format interface { - isTimeOfDayDomain_Format() -} - -type TimeOfDayDomain_StringFormat struct { - // Expected format that contains a combination of regular characters and - // special format specifiers. Format specifiers are a subset of the - // strptime standard. - StringFormat string `protobuf:"bytes,1,opt,name=string_format,json=stringFormat,oneof"` -} - -type TimeOfDayDomain_IntegerFormat struct { - // Expected format of integer times. - IntegerFormat TimeOfDayDomain_IntegerTimeOfDayFormat `protobuf:"varint,2,opt,name=integer_format,json=integerFormat,enum=tensorflow.metadata.v0.TimeOfDayDomain_IntegerTimeOfDayFormat,oneof"` -} - -func (*TimeOfDayDomain_StringFormat) isTimeOfDayDomain_Format() {} - -func (*TimeOfDayDomain_IntegerFormat) isTimeOfDayDomain_Format() {} - -// Describes constraints on the presence of the feature in the data. -type FeaturePresence struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Minimum fraction of examples that have this feature. - MinFraction *float64 `protobuf:"fixed64,1,opt,name=min_fraction,json=minFraction" json:"min_fraction,omitempty"` - // Minimum number of examples that have this feature. 
- MinCount *int64 `protobuf:"varint,2,opt,name=min_count,json=minCount" json:"min_count,omitempty"` -} - -func (x *FeaturePresence) Reset() { - *x = FeaturePresence{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeaturePresence) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeaturePresence) ProtoMessage() {} - -func (x *FeaturePresence) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeaturePresence.ProtoReflect.Descriptor instead. -func (*FeaturePresence) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{21} -} - -func (x *FeaturePresence) GetMinFraction() float64 { - if x != nil && x.MinFraction != nil { - return *x.MinFraction - } - return 0 -} - -func (x *FeaturePresence) GetMinCount() int64 { - if x != nil && x.MinCount != nil { - return *x.MinCount - } - return 0 -} - -// Records constraints on the presence of a feature inside a "group" context -// (e.g., .presence inside a group of features that define a sequence). -type FeaturePresenceWithinGroup struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Required *bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` -} - -func (x *FeaturePresenceWithinGroup) Reset() { - *x = FeaturePresenceWithinGroup{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeaturePresenceWithinGroup) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeaturePresenceWithinGroup) ProtoMessage() {} - -func (x *FeaturePresenceWithinGroup) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeaturePresenceWithinGroup.ProtoReflect.Descriptor instead. -func (*FeaturePresenceWithinGroup) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{22} -} - -func (x *FeaturePresenceWithinGroup) GetRequired() bool { - if x != nil && x.Required != nil { - return *x.Required - } - return false -} - -// Checks that the L-infinity norm is below a certain threshold between the -// two discrete distributions. Since this is applied to a FeatureNameStatistics, -// it only considers the top k. -// L_infty(p,q) = max_i |p_i-q_i| -type InfinityNorm struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The InfinityNorm is in the interval [0.0, 1.0] so sensible bounds should - // be in the interval [0.0, 1.0). 
- Threshold *float64 `protobuf:"fixed64,1,opt,name=threshold" json:"threshold,omitempty"` -} - -func (x *InfinityNorm) Reset() { - *x = InfinityNorm{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *InfinityNorm) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*InfinityNorm) ProtoMessage() {} - -func (x *InfinityNorm) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use InfinityNorm.ProtoReflect.Descriptor instead. -func (*InfinityNorm) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{23} -} - -func (x *InfinityNorm) GetThreshold() float64 { - if x != nil && x.Threshold != nil { - return *x.Threshold - } - return 0 -} - -type FeatureComparator struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - InfinityNorm *InfinityNorm `protobuf:"bytes,1,opt,name=infinity_norm,json=infinityNorm" json:"infinity_norm,omitempty"` -} - -func (x *FeatureComparator) Reset() { - *x = FeatureComparator{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[24] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureComparator) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureComparator) ProtoMessage() {} - -func (x *FeatureComparator) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[24] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureComparator.ProtoReflect.Descriptor instead. -func (*FeatureComparator) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{24} -} - -func (x *FeatureComparator) GetInfinityNorm() *InfinityNorm { - if x != nil { - return x.InfinityNorm - } - return nil -} - -// A TensorRepresentation captures the intent for converting columns in a -// dataset to TensorFlow Tensors (or more generally, tf.CompositeTensors). -// Note that one tf.CompositeTensor may consist of data from multiple columns, -// for example, a N-dimensional tf.SparseTensor may need N + 1 columns to -// provide the sparse indices and values. -// Note that the "column name" that a TensorRepresentation needs is a -// string, not a Path -- it means that the column name identifies a top-level -// Feature in the schema (i.e. you cannot specify a Feature nested in a STRUCT -// Feature). 
-type TensorRepresentation struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Kind: - // *TensorRepresentation_DenseTensor_ - // *TensorRepresentation_VarlenSparseTensor - // *TensorRepresentation_SparseTensor_ - Kind isTensorRepresentation_Kind `protobuf_oneof:"kind"` -} - -func (x *TensorRepresentation) Reset() { - *x = TensorRepresentation{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[25] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TensorRepresentation) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TensorRepresentation) ProtoMessage() {} - -func (x *TensorRepresentation) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[25] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TensorRepresentation.ProtoReflect.Descriptor instead. -func (*TensorRepresentation) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{25} -} - -func (m *TensorRepresentation) GetKind() isTensorRepresentation_Kind { - if m != nil { - return m.Kind - } - return nil -} - -func (x *TensorRepresentation) GetDenseTensor() *TensorRepresentation_DenseTensor { - if x, ok := x.GetKind().(*TensorRepresentation_DenseTensor_); ok { - return x.DenseTensor - } - return nil -} - -func (x *TensorRepresentation) GetVarlenSparseTensor() *TensorRepresentation_VarLenSparseTensor { - if x, ok := x.GetKind().(*TensorRepresentation_VarlenSparseTensor); ok { - return x.VarlenSparseTensor - } - return nil -} - -func (x *TensorRepresentation) GetSparseTensor() *TensorRepresentation_SparseTensor { - if x, ok := x.GetKind().(*TensorRepresentation_SparseTensor_); ok { - return x.SparseTensor - } - return nil -} - -type isTensorRepresentation_Kind interface { - isTensorRepresentation_Kind() -} - -type TensorRepresentation_DenseTensor_ struct { - DenseTensor *TensorRepresentation_DenseTensor `protobuf:"bytes,1,opt,name=dense_tensor,json=denseTensor,oneof"` -} - -type TensorRepresentation_VarlenSparseTensor struct { - VarlenSparseTensor *TensorRepresentation_VarLenSparseTensor `protobuf:"bytes,2,opt,name=varlen_sparse_tensor,json=varlenSparseTensor,oneof"` -} - -type TensorRepresentation_SparseTensor_ struct { - SparseTensor *TensorRepresentation_SparseTensor `protobuf:"bytes,3,opt,name=sparse_tensor,json=sparseTensor,oneof"` -} - -func (*TensorRepresentation_DenseTensor_) isTensorRepresentation_Kind() {} - -func (*TensorRepresentation_VarlenSparseTensor) isTensorRepresentation_Kind() {} - -func (*TensorRepresentation_SparseTensor_) isTensorRepresentation_Kind() {} - -// A TensorRepresentationGroup is a collection of TensorRepresentations with -// names. These names may serve as identifiers when converting the dataset -// to a collection of Tensors or tf.CompositeTensors. 
-// For example, given the following group: -// { -// key: "dense_tensor" -// tensor_representation { -// dense_tensor { -// column_name: "univalent_feature" -// shape { -// dim { -// size: 1 -// } -// } -// default_value { -// float_value: 0 -// } -// } -// } -// } -// { -// key: "varlen_sparse_tensor" -// tensor_representation { -// varlen_sparse_tensor { -// column_name: "multivalent_feature" -// } -// } -// } -// -// Then the schema is expected to have feature "univalent_feature" and -// "multivalent_feature", and when a batch of data is converted to Tensors using -// this TensorRepresentationGroup, the result may be the following dict: -// { -// "dense_tensor": tf.Tensor(...), -// "varlen_sparse_tensor": tf.SparseTensor(...), -// } -type TensorRepresentationGroup struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - TensorRepresentation map[string]*TensorRepresentation `protobuf:"bytes,1,rep,name=tensor_representation,json=tensorRepresentation" json:"tensor_representation,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` -} - -func (x *TensorRepresentationGroup) Reset() { - *x = TensorRepresentationGroup{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[26] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TensorRepresentationGroup) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TensorRepresentationGroup) ProtoMessage() {} - -func (x *TensorRepresentationGroup) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[26] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TensorRepresentationGroup.ProtoReflect.Descriptor instead. -func (*TensorRepresentationGroup) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{26} -} - -func (x *TensorRepresentationGroup) GetTensorRepresentation() map[string]*TensorRepresentation { - if x != nil { - return x.TensorRepresentation - } - return nil -} - -// An axis in a multi-dimensional feature representation. -type FixedShape_Dim struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Size *int64 `protobuf:"varint,1,opt,name=size" json:"size,omitempty"` - // Optional name of the tensor dimension. - Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` -} - -func (x *FixedShape_Dim) Reset() { - *x = FixedShape_Dim{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[28] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FixedShape_Dim) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FixedShape_Dim) ProtoMessage() {} - -func (x *FixedShape_Dim) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[28] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FixedShape_Dim.ProtoReflect.Descriptor instead. 
-func (*FixedShape_Dim) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{5, 0} -} - -func (x *FixedShape_Dim) GetSize() int64 { - if x != nil && x.Size != nil { - return *x.Size - } - return 0 -} - -func (x *FixedShape_Dim) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -type SparseFeature_IndexFeature struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the index-feature. This should be a reference to an existing - // feature in the schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` -} - -func (x *SparseFeature_IndexFeature) Reset() { - *x = SparseFeature_IndexFeature{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[29] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SparseFeature_IndexFeature) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SparseFeature_IndexFeature) ProtoMessage() {} - -func (x *SparseFeature_IndexFeature) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[29] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SparseFeature_IndexFeature.ProtoReflect.Descriptor instead. -func (*SparseFeature_IndexFeature) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{8, 0} -} - -func (x *SparseFeature_IndexFeature) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -type SparseFeature_ValueFeature struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Name of the value-feature. This should be a reference to an existing - // feature in the schema. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` -} - -func (x *SparseFeature_ValueFeature) Reset() { - *x = SparseFeature_ValueFeature{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[30] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SparseFeature_ValueFeature) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SparseFeature_ValueFeature) ProtoMessage() {} - -func (x *SparseFeature_ValueFeature) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[30] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SparseFeature_ValueFeature.ProtoReflect.Descriptor instead. 
-func (*SparseFeature_ValueFeature) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{8, 1}
-}
-
-func (x *SparseFeature_ValueFeature) GetName() string {
-	if x != nil && x.Name != nil {
-		return *x.Name
-	}
-	return ""
-}
-
-type TensorRepresentation_DefaultValue struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Types that are assignable to Kind:
-	//	*TensorRepresentation_DefaultValue_FloatValue
-	//	*TensorRepresentation_DefaultValue_IntValue
-	//	*TensorRepresentation_DefaultValue_BytesValue
-	//	*TensorRepresentation_DefaultValue_UintValue
-	Kind isTensorRepresentation_DefaultValue_Kind `protobuf_oneof:"kind"`
-}
-
-func (x *TensorRepresentation_DefaultValue) Reset() {
-	*x = TensorRepresentation_DefaultValue{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[31]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *TensorRepresentation_DefaultValue) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*TensorRepresentation_DefaultValue) ProtoMessage() {}
-
-func (x *TensorRepresentation_DefaultValue) ProtoReflect() protoreflect.Message {
-	mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[31]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use TensorRepresentation_DefaultValue.ProtoReflect.Descriptor instead.
-func (*TensorRepresentation_DefaultValue) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{25, 0}
-}
-
-func (m *TensorRepresentation_DefaultValue) GetKind() isTensorRepresentation_DefaultValue_Kind {
-	if m != nil {
-		return m.Kind
-	}
-	return nil
-}
-
-func (x *TensorRepresentation_DefaultValue) GetFloatValue() float64 {
-	if x, ok := x.GetKind().(*TensorRepresentation_DefaultValue_FloatValue); ok {
-		return x.FloatValue
-	}
-	return 0
-}
-
-func (x *TensorRepresentation_DefaultValue) GetIntValue() int64 {
-	if x, ok := x.GetKind().(*TensorRepresentation_DefaultValue_IntValue); ok {
-		return x.IntValue
-	}
-	return 0
-}
-
-func (x *TensorRepresentation_DefaultValue) GetBytesValue() []byte {
-	if x, ok := x.GetKind().(*TensorRepresentation_DefaultValue_BytesValue); ok {
-		return x.BytesValue
-	}
-	return nil
-}
-
-func (x *TensorRepresentation_DefaultValue) GetUintValue() uint64 {
-	if x, ok := x.GetKind().(*TensorRepresentation_DefaultValue_UintValue); ok {
-		return x.UintValue
-	}
-	return 0
-}
-
-type isTensorRepresentation_DefaultValue_Kind interface {
-	isTensorRepresentation_DefaultValue_Kind()
-}
-
-type TensorRepresentation_DefaultValue_FloatValue struct {
-	FloatValue float64 `protobuf:"fixed64,1,opt,name=float_value,json=floatValue,oneof"`
-}
-
-type TensorRepresentation_DefaultValue_IntValue struct {
-	// Note that the data column might be of a shorter integral type. It's the
-	// user's responsibility to make sure the default value fits that type.
-	IntValue int64 `protobuf:"varint,2,opt,name=int_value,json=intValue,oneof"`
-}
-
-type TensorRepresentation_DefaultValue_BytesValue struct {
-	BytesValue []byte `protobuf:"bytes,3,opt,name=bytes_value,json=bytesValue,oneof"`
-}
-
-type TensorRepresentation_DefaultValue_UintValue struct {
-	// uint_value should only be used if the default value can't fit in an
-	// int64 (`int_value`).
-	UintValue uint64 `protobuf:"varint,4,opt,name=uint_value,json=uintValue,oneof"`
-}
-
-func (*TensorRepresentation_DefaultValue_FloatValue) isTensorRepresentation_DefaultValue_Kind() {}
-
-func (*TensorRepresentation_DefaultValue_IntValue) isTensorRepresentation_DefaultValue_Kind() {}
-
-func (*TensorRepresentation_DefaultValue_BytesValue) isTensorRepresentation_DefaultValue_Kind() {}
-
-func (*TensorRepresentation_DefaultValue_UintValue) isTensorRepresentation_DefaultValue_Kind() {}
-
-// A tf.Tensor
-type TensorRepresentation_DenseTensor struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Identifies the column in the dataset that provides the values of this
-	// Tensor.
-	ColumnName *string `protobuf:"bytes,1,opt,name=column_name,json=columnName" json:"column_name,omitempty"`
-	// The shape of each row of the data (i.e. does not include the batch
-	// dimension)
-	Shape *FixedShape `protobuf:"bytes,2,opt,name=shape" json:"shape,omitempty"`
-	// If this column is missing values in a row, the default_value will be
-	// used to fill that row.
-	DefaultValue *TensorRepresentation_DefaultValue `protobuf:"bytes,3,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"`
-}
-
-func (x *TensorRepresentation_DenseTensor) Reset() {
-	*x = TensorRepresentation_DenseTensor{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[32]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *TensorRepresentation_DenseTensor) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*TensorRepresentation_DenseTensor) ProtoMessage() {}
-
-func (x *TensorRepresentation_DenseTensor) ProtoReflect() protoreflect.Message {
-	mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[32]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use TensorRepresentation_DenseTensor.ProtoReflect.Descriptor instead.
-func (*TensorRepresentation_DenseTensor) Descriptor() ([]byte, []int) {
-	return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{25, 1}
-}
-
-func (x *TensorRepresentation_DenseTensor) GetColumnName() string {
-	if x != nil && x.ColumnName != nil {
-		return *x.ColumnName
-	}
-	return ""
-}
-
-func (x *TensorRepresentation_DenseTensor) GetShape() *FixedShape {
-	if x != nil {
-		return x.Shape
-	}
-	return nil
-}
-
-func (x *TensorRepresentation_DenseTensor) GetDefaultValue() *TensorRepresentation_DefaultValue {
-	if x != nil {
-		return x.DefaultValue
-	}
-	return nil
-}
-
-// A ragged tf.SparseTensor that models nested lists.
-type TensorRepresentation_VarLenSparseTensor struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Identifies the column in the dataset that should be converted to the
-	// VarLenSparseTensor.
- ColumnName *string `protobuf:"bytes,1,opt,name=column_name,json=columnName" json:"column_name,omitempty"` -} - -func (x *TensorRepresentation_VarLenSparseTensor) Reset() { - *x = TensorRepresentation_VarLenSparseTensor{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[33] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TensorRepresentation_VarLenSparseTensor) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TensorRepresentation_VarLenSparseTensor) ProtoMessage() {} - -func (x *TensorRepresentation_VarLenSparseTensor) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[33] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TensorRepresentation_VarLenSparseTensor.ProtoReflect.Descriptor instead. -func (*TensorRepresentation_VarLenSparseTensor) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{25, 2} -} - -func (x *TensorRepresentation_VarLenSparseTensor) GetColumnName() string { - if x != nil && x.ColumnName != nil { - return *x.ColumnName - } - return "" -} - -// A tf.SparseTensor whose indices and values come from separate data columns. -// This will replace Schema.sparse_feature eventually. -// The index columns must be of INT type, and all the columns must co-occur -// and have the same valency at the same row. -type TensorRepresentation_SparseTensor struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The dense shape of the resulting SparseTensor (does not include the batch - // dimension). - DenseShape *FixedShape `protobuf:"bytes,1,opt,name=dense_shape,json=denseShape" json:"dense_shape,omitempty"` - // The columns constitute the coordinates of the values. - // indices_column[i][j] contains the coordinate of the i-th dimension of the - // j-th value. - IndexColumnNames []string `protobuf:"bytes,2,rep,name=index_column_names,json=indexColumnNames" json:"index_column_names,omitempty"` - // The column that contains the values. - ValueColumnName *string `protobuf:"bytes,3,opt,name=value_column_name,json=valueColumnName" json:"value_column_name,omitempty"` -} - -func (x *TensorRepresentation_SparseTensor) Reset() { - *x = TensorRepresentation_SparseTensor{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[34] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TensorRepresentation_SparseTensor) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TensorRepresentation_SparseTensor) ProtoMessage() {} - -func (x *TensorRepresentation_SparseTensor) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[34] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TensorRepresentation_SparseTensor.ProtoReflect.Descriptor instead. 
-func (*TensorRepresentation_SparseTensor) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP(), []int{25, 3} -} - -func (x *TensorRepresentation_SparseTensor) GetDenseShape() *FixedShape { - if x != nil { - return x.DenseShape - } - return nil -} - -func (x *TensorRepresentation_SparseTensor) GetIndexColumnNames() []string { - if x != nil { - return x.IndexColumnNames - } - return nil -} - -func (x *TensorRepresentation_SparseTensor) GetValueColumnName() string { - if x != nil && x.ValueColumnName != nil { - return *x.ValueColumnName - } - return "" -} - -var File_tensorflow_metadata_proto_v0_schema_proto protoreflect.FileDescriptor - -var file_tensorflow_metadata_proto_v0_schema_proto_rawDesc = []byte{ - 0x0a, 0x29, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x27, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x70, 0x61, 0x74, - 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x8c, 0x07, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x12, 0x39, 0x0a, 0x07, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x52, 0x07, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x4c, 0x0a, - 0x0e, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, - 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0d, 0x73, 0x70, - 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x52, 0x0a, 0x10, 0x77, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, - 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x57, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0f, - 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, - 0x49, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x0c, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x46, 0x0a, 0x0c, 0x66, 0x6c, - 0x6f, 0x61, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 
0x61, 0x69, 0x6e, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x12, 0x40, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x2f, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, - 0x65, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x03, 0x28, - 0x09, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x45, 0x6e, 0x76, 0x69, 0x72, 0x6f, - 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x42, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x61, - 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x13, 0x64, 0x61, 0x74, - 0x61, 0x73, 0x65, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, - 0x74, 0x73, 0x52, 0x12, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x74, - 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x7d, 0x0a, 0x1b, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x5f, 0x72, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x54, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x19, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x47, 0x72, 0x6f, 0x75, 0x70, 0x1a, 0x7f, 0x0a, 0x1e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, - 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, - 0x75, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x47, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 
0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xb9, 0x0e, 0x0a, 0x07, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x22, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, - 0x61, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, - 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x45, 0x0a, 0x08, 0x70, 0x72, - 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, - 0x73, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, - 0x65, 0x12, 0x5b, 0x0a, 0x0e, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, - 0x6e, 0x63, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x57, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x00, 0x52, - 0x0d, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, - 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, - 0x65, 0x48, 0x01, 0x52, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, - 0x75, 0x6e, 0x74, 0x48, 0x01, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, - 0x74, 0x12, 0x37, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x06, 0x64, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x06, 0x64, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x69, - 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, 0x0c, 0x66, 0x6c, 0x6f, 0x61, - 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 
0x6d, - 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, - 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, - 0x45, 0x0a, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0d, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x42, 0x6f, - 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, - 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x12, 0x67, 0x0a, 0x17, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x5f, 0x6c, - 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x18, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4e, 0x61, - 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x15, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, - 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, 0x0c, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x19, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6d, 0x61, 0x67, - 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x6d, 0x69, 0x64, 0x5f, 0x64, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x4d, 0x49, 0x44, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, - 0x09, 0x6d, 0x69, 0x64, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x75, 0x72, - 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x55, 0x52, 0x4c, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x48, 0x02, 0x52, 0x09, 0x75, 0x72, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x45, - 0x0a, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x1c, 0x20, - 0x01, 0x28, 0x0b, 
0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x12, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6f, 0x66, - 0x5f, 0x64, 0x61, 0x79, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x1e, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x4f, - 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0f, 0x74, 0x69, - 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x6a, 0x0a, - 0x18, 0x64, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x2f, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, - 0x52, 0x17, 0x64, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x42, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, - 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a, - 0x0f, 0x73, 0x6b, 0x65, 0x77, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, - 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x52, 0x0e, 0x73, 0x6b, 0x65, 0x77, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x12, 0x54, 0x0a, 0x10, 0x64, 0x72, 0x69, 0x66, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, - 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x0f, 0x64, 0x72, 0x69, 0x66, 0x74, 0x43, 0x6f, 0x6d, - 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x5f, 0x65, 0x6e, - 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x14, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0d, 0x69, 0x6e, 0x45, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x2c, - 0x0a, 0x12, 0x6e, 0x6f, 0x74, 0x5f, 0x69, 0x6e, 0x5f, 0x65, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, - 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, 0x10, 0x6e, 0x6f, 0x74, 0x49, - 0x6e, 0x45, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x4f, 0x0a, 0x0f, - 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 
0x6c, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, - 0x16, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4c, - 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x0e, 0x6c, - 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x42, 0x16, 0x0a, - 0x14, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, - 0x61, 0x69, 0x6e, 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x68, 0x61, 0x70, 0x65, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x5f, 0x69, 0x6e, - 0x66, 0x6f, 0x22, 0x75, 0x0a, 0x0a, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x10, 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x74, - 0x61, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x3b, 0x0a, 0x0e, - 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x0d, 0x65, 0x78, 0x74, 0x72, - 0x61, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x84, 0x01, 0x0a, 0x16, 0x4e, 0x75, - 0x6d, 0x65, 0x72, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, - 0x61, 0x74, 0x6f, 0x72, 0x12, 0x34, 0x0a, 0x16, 0x6d, 0x69, 0x6e, 0x5f, 0x66, 0x72, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x01, 0x52, 0x14, 0x6d, 0x69, 0x6e, 0x46, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x34, 0x0a, 0x16, 0x6d, 0x61, - 0x78, 0x5f, 0x66, 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, - 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x14, 0x6d, 0x61, 0x78, 0x46, - 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, - 0x22, 0xac, 0x02, 0x0a, 0x12, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, - 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x71, 0x0a, 0x1d, 0x6e, 0x75, 0x6d, 0x5f, 0x65, - 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x5f, 0x64, 0x72, 0x69, 0x66, 0x74, 0x5f, 0x63, 0x6f, - 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x1a, - 0x6e, 0x75, 0x6d, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x44, 0x72, 0x69, 0x66, 0x74, - 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x75, 0x0a, 0x1f, 0x6e, 0x75, - 0x6d, 0x5f, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 
0x30, 0x2e, 0x4e, 0x75, 0x6d, - 0x65, 0x72, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, - 0x74, 0x6f, 0x72, 0x52, 0x1c, 0x6e, 0x75, 0x6d, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x12, 0x2c, 0x0a, 0x12, 0x6d, 0x69, 0x6e, 0x5f, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, - 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x10, 0x6d, - 0x69, 0x6e, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, - 0x75, 0x0a, 0x0a, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x12, 0x38, 0x0a, - 0x03, 0x64, 0x69, 0x6d, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x2e, 0x44, - 0x69, 0x6d, 0x52, 0x03, 0x64, 0x69, 0x6d, 0x1a, 0x2d, 0x0a, 0x03, 0x44, 0x69, 0x6d, 0x12, 0x12, - 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, - 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x30, 0x0a, 0x0a, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x03, 0x6d, 0x69, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x61, 0x78, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x03, 0x6d, 0x61, 0x78, 0x22, 0xf3, 0x01, 0x0a, 0x0f, 0x57, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x65, 0x64, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x36, 0x0a, 0x07, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x52, - 0x07, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x43, 0x0a, 0x0e, 0x77, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x52, 0x0d, - 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x4f, 0x0a, - 0x0f, 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x0e, - 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x22, 0x80, - 0x05, 0x0a, 0x0d, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x22, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, - 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x42, 0x02, 0x18, 0x01, 0x52, 0x0a, 0x64, 
0x65, - 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x4f, 0x0a, 0x0f, 0x6c, 0x69, 0x66, 0x65, - 0x63, 0x79, 0x63, 0x6c, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x26, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4c, 0x69, 0x66, 0x65, 0x63, - 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x0e, 0x6c, 0x69, 0x66, 0x65, 0x63, - 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x47, 0x0a, 0x08, 0x70, 0x72, 0x65, - 0x73, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, - 0x65, 0x6e, 0x63, 0x65, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x12, 0x43, 0x0a, 0x0b, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x70, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, - 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x52, 0x0a, 0x64, 0x65, 0x6e, - 0x73, 0x65, 0x53, 0x68, 0x61, 0x70, 0x65, 0x12, 0x57, 0x0a, 0x0d, 0x69, 0x6e, 0x64, 0x65, 0x78, - 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x52, 0x0c, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, 0x73, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x53, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x12, 0x57, 0x0a, - 0x0d, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x70, - 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x2e, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x3b, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0a, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x1a, 0x22, 0x0a, 0x0c, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x22, 0x0a, 0x0c, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x4a, 0x04, 0x08, 0x0b, 0x10, - 0x0c, 0x22, 0x44, 
0x0a, 0x17, 0x44, 0x69, 0x73, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x29, 0x0a, 0x0f, - 0x6d, 0x69, 0x6e, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x5f, 0x6d, 0x61, 0x73, 0x73, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x01, 0x3a, 0x01, 0x31, 0x52, 0x0d, 0x6d, 0x69, 0x6e, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x4d, 0x61, 0x73, 0x73, 0x22, 0x6a, 0x0a, 0x09, 0x49, 0x6e, 0x74, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x6d, 0x69, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x61, - 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x6d, 0x61, 0x78, 0x12, 0x25, 0x0a, 0x0e, - 0x69, 0x73, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x73, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, - 0x63, 0x61, 0x6c, 0x22, 0x45, 0x0a, 0x0b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x02, 0x52, 0x03, 0x6d, 0x69, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x61, 0x78, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x02, 0x52, 0x03, 0x6d, 0x61, 0x78, 0x22, 0x97, 0x01, 0x0a, 0x0c, 0x53, - 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x39, 0x0a, 0x07, 0x66, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x07, 0x66, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x4c, 0x0a, 0x0e, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, - 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x22, 0x38, 0x0a, 0x0c, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x60, - 0x0a, 0x0a, 0x42, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x72, 0x75, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x72, 0x75, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1f, 0x0a, 0x0b, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x17, 0x0a, 0x15, 0x4e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, - 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 
0x69, 0x6e, 0x22, 0x0d, 0x0a, 0x0b, 0x49, 0x6d, 0x61, - 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x22, 0x0b, 0x0a, 0x09, 0x4d, 0x49, 0x44, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x22, 0x0b, 0x0a, 0x09, 0x55, 0x52, 0x4c, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x22, 0xab, 0x02, 0x0a, 0x0a, 0x54, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x12, 0x25, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x6f, 0x72, 0x6d, - 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, - 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x5d, 0x0a, 0x0e, 0x69, 0x6e, 0x74, 0x65, - 0x67, 0x65, 0x72, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x34, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, - 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, - 0x72, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x22, 0x8c, 0x01, 0x0a, 0x11, 0x49, 0x6e, 0x74, 0x65, - 0x67, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x12, 0x0a, - 0x0e, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, - 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x49, 0x58, 0x5f, 0x44, 0x41, 0x59, 0x53, 0x10, 0x05, - 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x49, 0x58, 0x5f, 0x53, 0x45, 0x43, 0x4f, 0x4e, 0x44, 0x53, - 0x10, 0x01, 0x12, 0x15, 0x0a, 0x11, 0x55, 0x4e, 0x49, 0x58, 0x5f, 0x4d, 0x49, 0x4c, 0x4c, 0x49, - 0x53, 0x45, 0x43, 0x4f, 0x4e, 0x44, 0x53, 0x10, 0x02, 0x12, 0x15, 0x0a, 0x11, 0x55, 0x4e, 0x49, - 0x58, 0x5f, 0x4d, 0x49, 0x43, 0x52, 0x4f, 0x53, 0x45, 0x43, 0x4f, 0x4e, 0x44, 0x53, 0x10, 0x03, - 0x12, 0x14, 0x0a, 0x10, 0x55, 0x4e, 0x49, 0x58, 0x5f, 0x4e, 0x41, 0x4e, 0x4f, 0x53, 0x45, 0x43, - 0x4f, 0x4e, 0x44, 0x53, 0x10, 0x04, 0x42, 0x08, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x22, 0xee, 0x01, 0x0a, 0x0f, 0x54, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x25, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x66, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x73, - 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x67, 0x0a, 0x0e, 0x69, - 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x3e, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x2e, 0x49, 0x6e, 0x74, - 0x65, 0x67, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x46, 0x6f, 0x72, - 0x6d, 0x61, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x46, 0x6f, - 0x72, 0x6d, 0x61, 0x74, 0x22, 0x41, 0x0a, 0x16, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x54, - 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x12, - 0x0a, 0x0e, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x50, 0x41, 0x43, 0x4b, 0x45, 0x44, 0x5f, 0x36, 0x34, 0x5f, - 0x4e, 0x41, 0x4e, 0x4f, 0x53, 0x10, 0x01, 0x42, 0x08, 0x0a, 0x06, 
0x66, 0x6f, 0x72, 0x6d, 0x61, - 0x74, 0x22, 0x51, 0x0a, 0x0f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, - 0x65, 0x6e, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x69, 0x6e, 0x5f, 0x66, 0x72, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, 0x6d, 0x69, 0x6e, 0x46, - 0x72, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x38, 0x0a, 0x1a, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, - 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x57, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, - 0x75, 0x70, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x22, 0x2c, - 0x0a, 0x0c, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x4e, 0x6f, 0x72, 0x6d, 0x12, 0x1c, - 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x5e, 0x0a, 0x11, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x12, 0x49, 0x0a, 0x0d, 0x69, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x5f, 0x6e, 0x6f, - 0x72, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x4e, 0x6f, 0x72, 0x6d, 0x52, 0x0c, - 0x69, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x4e, 0x6f, 0x72, 0x6d, 0x22, 0xa5, 0x07, 0x0a, - 0x14, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5d, 0x0a, 0x0c, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, - 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x44, 0x65, 0x6e, 0x73, 0x65, 0x54, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x73, 0x0a, 0x14, 0x76, 0x61, 0x72, 0x6c, 0x65, 0x6e, 0x5f, 0x73, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x3f, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x56, 0x61, 0x72, 0x4c, 0x65, 0x6e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x12, 0x76, 0x61, 0x72, 0x6c, 0x65, 0x6e, 0x53, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x60, 0x0a, 0x0d, 0x73, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x5f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x39, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 
0x72, - 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x73, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x1a, 0x9c, 0x01, 0x0a, 0x0c, - 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, - 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, - 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x75, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x09, 0x75, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x1a, 0xc8, 0x01, 0x0a, 0x0b, 0x44, - 0x65, 0x6e, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6f, - 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x05, 0x73, - 0x68, 0x61, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x52, 0x05, - 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x5e, 0x0a, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, - 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x44, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x35, 0x0a, 0x12, 0x56, 0x61, 0x72, 0x4c, 0x65, 0x6e, 0x53, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x63, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0xad, 0x01, 0x0a, - 0x0c, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x12, 0x43, 0x0a, - 0x0b, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, - 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x52, 0x0a, 0x64, 0x65, 0x6e, 0x73, 0x65, 0x53, 0x68, 0x61, - 0x70, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x63, 0x6f, 0x6c, 0x75, - 0x6d, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x10, - 0x69, 0x6e, 0x64, 
0x65, 0x78, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x73, - 0x12, 0x2a, 0x0a, 0x11, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x42, 0x06, 0x0a, 0x04, - 0x6b, 0x69, 0x6e, 0x64, 0x22, 0x95, 0x02, 0x0a, 0x19, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, - 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x72, 0x6f, - 0x75, 0x70, 0x12, 0x80, 0x01, 0x0a, 0x15, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x5f, 0x72, 0x65, - 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x4b, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x47, 0x72, 0x6f, 0x75, 0x70, 0x2e, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, - 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x14, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x75, 0x0a, 0x19, 0x54, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x52, - 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x42, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x2a, 0x75, 0x0a, 0x0e, - 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x11, - 0x0a, 0x0d, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x47, 0x45, 0x10, - 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x4c, 0x41, 0x4e, 0x4e, 0x45, 0x44, 0x10, 0x01, 0x12, 0x09, - 0x0a, 0x05, 0x41, 0x4c, 0x50, 0x48, 0x41, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x45, 0x54, - 0x41, 0x10, 0x03, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x52, 0x4f, 0x44, 0x55, 0x43, 0x54, 0x49, 0x4f, - 0x4e, 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x44, 0x45, 0x50, 0x52, 0x45, 0x43, 0x41, 0x54, 0x45, - 0x44, 0x10, 0x05, 0x12, 0x0e, 0x0a, 0x0a, 0x44, 0x45, 0x42, 0x55, 0x47, 0x5f, 0x4f, 0x4e, 0x4c, - 0x59, 0x10, 0x06, 0x2a, 0x4a, 0x0a, 0x0b, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x54, 0x79, - 0x70, 0x65, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, - 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, 0x54, 0x45, 0x53, 0x10, 0x01, 0x12, - 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, 0x41, - 0x54, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x10, 0x04, 0x42, - 0x68, 0x0a, 0x1a, 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, - 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, - 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, -} - -var ( - file_tensorflow_metadata_proto_v0_schema_proto_rawDescOnce sync.Once - file_tensorflow_metadata_proto_v0_schema_proto_rawDescData = file_tensorflow_metadata_proto_v0_schema_proto_rawDesc -) - -func file_tensorflow_metadata_proto_v0_schema_proto_rawDescGZIP() []byte { - file_tensorflow_metadata_proto_v0_schema_proto_rawDescOnce.Do(func() { - file_tensorflow_metadata_proto_v0_schema_proto_rawDescData = protoimpl.X.CompressGZIP(file_tensorflow_metadata_proto_v0_schema_proto_rawDescData) - }) - return file_tensorflow_metadata_proto_v0_schema_proto_rawDescData -} - -var file_tensorflow_metadata_proto_v0_schema_proto_enumTypes = make([]protoimpl.EnumInfo, 4) -var file_tensorflow_metadata_proto_v0_schema_proto_msgTypes = make([]protoimpl.MessageInfo, 36) -var file_tensorflow_metadata_proto_v0_schema_proto_goTypes = []interface{}{ - (LifecycleStage)(0), // 0: tensorflow.metadata.v0.LifecycleStage - (FeatureType)(0), // 1: tensorflow.metadata.v0.FeatureType - (TimeDomain_IntegerTimeFormat)(0), // 2: tensorflow.metadata.v0.TimeDomain.IntegerTimeFormat - (TimeOfDayDomain_IntegerTimeOfDayFormat)(0), // 3: tensorflow.metadata.v0.TimeOfDayDomain.IntegerTimeOfDayFormat - (*Schema)(nil), // 4: tensorflow.metadata.v0.Schema - (*Feature)(nil), // 5: tensorflow.metadata.v0.Feature - (*Annotation)(nil), // 6: tensorflow.metadata.v0.Annotation - (*NumericValueComparator)(nil), // 7: tensorflow.metadata.v0.NumericValueComparator - (*DatasetConstraints)(nil), // 8: tensorflow.metadata.v0.DatasetConstraints - (*FixedShape)(nil), // 9: tensorflow.metadata.v0.FixedShape - (*ValueCount)(nil), // 10: tensorflow.metadata.v0.ValueCount - (*WeightedFeature)(nil), // 11: tensorflow.metadata.v0.WeightedFeature - (*SparseFeature)(nil), // 12: tensorflow.metadata.v0.SparseFeature - (*DistributionConstraints)(nil), // 13: tensorflow.metadata.v0.DistributionConstraints - (*IntDomain)(nil), // 14: tensorflow.metadata.v0.IntDomain - (*FloatDomain)(nil), // 15: tensorflow.metadata.v0.FloatDomain - (*StructDomain)(nil), // 16: tensorflow.metadata.v0.StructDomain - (*StringDomain)(nil), // 17: tensorflow.metadata.v0.StringDomain - (*BoolDomain)(nil), // 18: tensorflow.metadata.v0.BoolDomain - (*NaturalLanguageDomain)(nil), // 19: tensorflow.metadata.v0.NaturalLanguageDomain - (*ImageDomain)(nil), // 20: tensorflow.metadata.v0.ImageDomain - (*MIDDomain)(nil), // 21: tensorflow.metadata.v0.MIDDomain - (*URLDomain)(nil), // 22: tensorflow.metadata.v0.URLDomain - (*TimeDomain)(nil), // 23: tensorflow.metadata.v0.TimeDomain - (*TimeOfDayDomain)(nil), // 24: tensorflow.metadata.v0.TimeOfDayDomain - (*FeaturePresence)(nil), // 25: tensorflow.metadata.v0.FeaturePresence - (*FeaturePresenceWithinGroup)(nil), // 26: tensorflow.metadata.v0.FeaturePresenceWithinGroup - (*InfinityNorm)(nil), // 27: tensorflow.metadata.v0.InfinityNorm - (*FeatureComparator)(nil), // 28: tensorflow.metadata.v0.FeatureComparator - (*TensorRepresentation)(nil), // 29: tensorflow.metadata.v0.TensorRepresentation - (*TensorRepresentationGroup)(nil), // 30: tensorflow.metadata.v0.TensorRepresentationGroup - nil, // 31: 
tensorflow.metadata.v0.Schema.TensorRepresentationGroupEntry - (*FixedShape_Dim)(nil), // 32: tensorflow.metadata.v0.FixedShape.Dim - (*SparseFeature_IndexFeature)(nil), // 33: tensorflow.metadata.v0.SparseFeature.IndexFeature - (*SparseFeature_ValueFeature)(nil), // 34: tensorflow.metadata.v0.SparseFeature.ValueFeature - (*TensorRepresentation_DefaultValue)(nil), // 35: tensorflow.metadata.v0.TensorRepresentation.DefaultValue - (*TensorRepresentation_DenseTensor)(nil), // 36: tensorflow.metadata.v0.TensorRepresentation.DenseTensor - (*TensorRepresentation_VarLenSparseTensor)(nil), // 37: tensorflow.metadata.v0.TensorRepresentation.VarLenSparseTensor - (*TensorRepresentation_SparseTensor)(nil), // 38: tensorflow.metadata.v0.TensorRepresentation.SparseTensor - nil, // 39: tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry - (*any.Any)(nil), // 40: google.protobuf.Any - (*Path)(nil), // 41: tensorflow.metadata.v0.Path -} -var file_tensorflow_metadata_proto_v0_schema_proto_depIdxs = []int32{ - 5, // 0: tensorflow.metadata.v0.Schema.feature:type_name -> tensorflow.metadata.v0.Feature - 12, // 1: tensorflow.metadata.v0.Schema.sparse_feature:type_name -> tensorflow.metadata.v0.SparseFeature - 11, // 2: tensorflow.metadata.v0.Schema.weighted_feature:type_name -> tensorflow.metadata.v0.WeightedFeature - 17, // 3: tensorflow.metadata.v0.Schema.string_domain:type_name -> tensorflow.metadata.v0.StringDomain - 15, // 4: tensorflow.metadata.v0.Schema.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain - 14, // 5: tensorflow.metadata.v0.Schema.int_domain:type_name -> tensorflow.metadata.v0.IntDomain - 6, // 6: tensorflow.metadata.v0.Schema.annotation:type_name -> tensorflow.metadata.v0.Annotation - 8, // 7: tensorflow.metadata.v0.Schema.dataset_constraints:type_name -> tensorflow.metadata.v0.DatasetConstraints - 31, // 8: tensorflow.metadata.v0.Schema.tensor_representation_group:type_name -> tensorflow.metadata.v0.Schema.TensorRepresentationGroupEntry - 25, // 9: tensorflow.metadata.v0.Feature.presence:type_name -> tensorflow.metadata.v0.FeaturePresence - 26, // 10: tensorflow.metadata.v0.Feature.group_presence:type_name -> tensorflow.metadata.v0.FeaturePresenceWithinGroup - 9, // 11: tensorflow.metadata.v0.Feature.shape:type_name -> tensorflow.metadata.v0.FixedShape - 10, // 12: tensorflow.metadata.v0.Feature.value_count:type_name -> tensorflow.metadata.v0.ValueCount - 1, // 13: tensorflow.metadata.v0.Feature.type:type_name -> tensorflow.metadata.v0.FeatureType - 14, // 14: tensorflow.metadata.v0.Feature.int_domain:type_name -> tensorflow.metadata.v0.IntDomain - 15, // 15: tensorflow.metadata.v0.Feature.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain - 17, // 16: tensorflow.metadata.v0.Feature.string_domain:type_name -> tensorflow.metadata.v0.StringDomain - 18, // 17: tensorflow.metadata.v0.Feature.bool_domain:type_name -> tensorflow.metadata.v0.BoolDomain - 16, // 18: tensorflow.metadata.v0.Feature.struct_domain:type_name -> tensorflow.metadata.v0.StructDomain - 19, // 19: tensorflow.metadata.v0.Feature.natural_language_domain:type_name -> tensorflow.metadata.v0.NaturalLanguageDomain - 20, // 20: tensorflow.metadata.v0.Feature.image_domain:type_name -> tensorflow.metadata.v0.ImageDomain - 21, // 21: tensorflow.metadata.v0.Feature.mid_domain:type_name -> tensorflow.metadata.v0.MIDDomain - 22, // 22: tensorflow.metadata.v0.Feature.url_domain:type_name -> tensorflow.metadata.v0.URLDomain - 23, // 23: tensorflow.metadata.v0.Feature.time_domain:type_name -> 
tensorflow.metadata.v0.TimeDomain - 24, // 24: tensorflow.metadata.v0.Feature.time_of_day_domain:type_name -> tensorflow.metadata.v0.TimeOfDayDomain - 13, // 25: tensorflow.metadata.v0.Feature.distribution_constraints:type_name -> tensorflow.metadata.v0.DistributionConstraints - 6, // 26: tensorflow.metadata.v0.Feature.annotation:type_name -> tensorflow.metadata.v0.Annotation - 28, // 27: tensorflow.metadata.v0.Feature.skew_comparator:type_name -> tensorflow.metadata.v0.FeatureComparator - 28, // 28: tensorflow.metadata.v0.Feature.drift_comparator:type_name -> tensorflow.metadata.v0.FeatureComparator - 0, // 29: tensorflow.metadata.v0.Feature.lifecycle_stage:type_name -> tensorflow.metadata.v0.LifecycleStage - 40, // 30: tensorflow.metadata.v0.Annotation.extra_metadata:type_name -> google.protobuf.Any - 7, // 31: tensorflow.metadata.v0.DatasetConstraints.num_examples_drift_comparator:type_name -> tensorflow.metadata.v0.NumericValueComparator - 7, // 32: tensorflow.metadata.v0.DatasetConstraints.num_examples_version_comparator:type_name -> tensorflow.metadata.v0.NumericValueComparator - 32, // 33: tensorflow.metadata.v0.FixedShape.dim:type_name -> tensorflow.metadata.v0.FixedShape.Dim - 41, // 34: tensorflow.metadata.v0.WeightedFeature.feature:type_name -> tensorflow.metadata.v0.Path - 41, // 35: tensorflow.metadata.v0.WeightedFeature.weight_feature:type_name -> tensorflow.metadata.v0.Path - 0, // 36: tensorflow.metadata.v0.WeightedFeature.lifecycle_stage:type_name -> tensorflow.metadata.v0.LifecycleStage - 0, // 37: tensorflow.metadata.v0.SparseFeature.lifecycle_stage:type_name -> tensorflow.metadata.v0.LifecycleStage - 25, // 38: tensorflow.metadata.v0.SparseFeature.presence:type_name -> tensorflow.metadata.v0.FeaturePresence - 9, // 39: tensorflow.metadata.v0.SparseFeature.dense_shape:type_name -> tensorflow.metadata.v0.FixedShape - 33, // 40: tensorflow.metadata.v0.SparseFeature.index_feature:type_name -> tensorflow.metadata.v0.SparseFeature.IndexFeature - 34, // 41: tensorflow.metadata.v0.SparseFeature.value_feature:type_name -> tensorflow.metadata.v0.SparseFeature.ValueFeature - 1, // 42: tensorflow.metadata.v0.SparseFeature.type:type_name -> tensorflow.metadata.v0.FeatureType - 5, // 43: tensorflow.metadata.v0.StructDomain.feature:type_name -> tensorflow.metadata.v0.Feature - 12, // 44: tensorflow.metadata.v0.StructDomain.sparse_feature:type_name -> tensorflow.metadata.v0.SparseFeature - 2, // 45: tensorflow.metadata.v0.TimeDomain.integer_format:type_name -> tensorflow.metadata.v0.TimeDomain.IntegerTimeFormat - 3, // 46: tensorflow.metadata.v0.TimeOfDayDomain.integer_format:type_name -> tensorflow.metadata.v0.TimeOfDayDomain.IntegerTimeOfDayFormat - 27, // 47: tensorflow.metadata.v0.FeatureComparator.infinity_norm:type_name -> tensorflow.metadata.v0.InfinityNorm - 36, // 48: tensorflow.metadata.v0.TensorRepresentation.dense_tensor:type_name -> tensorflow.metadata.v0.TensorRepresentation.DenseTensor - 37, // 49: tensorflow.metadata.v0.TensorRepresentation.varlen_sparse_tensor:type_name -> tensorflow.metadata.v0.TensorRepresentation.VarLenSparseTensor - 38, // 50: tensorflow.metadata.v0.TensorRepresentation.sparse_tensor:type_name -> tensorflow.metadata.v0.TensorRepresentation.SparseTensor - 39, // 51: tensorflow.metadata.v0.TensorRepresentationGroup.tensor_representation:type_name -> tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry - 30, // 52: tensorflow.metadata.v0.Schema.TensorRepresentationGroupEntry.value:type_name -> 
tensorflow.metadata.v0.TensorRepresentationGroup - 9, // 53: tensorflow.metadata.v0.TensorRepresentation.DenseTensor.shape:type_name -> tensorflow.metadata.v0.FixedShape - 35, // 54: tensorflow.metadata.v0.TensorRepresentation.DenseTensor.default_value:type_name -> tensorflow.metadata.v0.TensorRepresentation.DefaultValue - 9, // 55: tensorflow.metadata.v0.TensorRepresentation.SparseTensor.dense_shape:type_name -> tensorflow.metadata.v0.FixedShape - 29, // 56: tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry.value:type_name -> tensorflow.metadata.v0.TensorRepresentation - 57, // [57:57] is the sub-list for method output_type - 57, // [57:57] is the sub-list for method input_type - 57, // [57:57] is the sub-list for extension type_name - 57, // [57:57] is the sub-list for extension extendee - 0, // [0:57] is the sub-list for field type_name -} - -func init() { file_tensorflow_metadata_proto_v0_schema_proto_init() } -func file_tensorflow_metadata_proto_v0_schema_proto_init() { - if File_tensorflow_metadata_proto_v0_schema_proto != nil { - return - } - file_tensorflow_metadata_proto_v0_path_proto_init() - if !protoimpl.UnsafeEnabled { - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Schema); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Feature); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Annotation); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NumericValueComparator); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DatasetConstraints); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FixedShape); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValueCount); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WeightedFeature); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SparseFeature); i { - 
case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DistributionConstraints); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*IntDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FloatDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StructDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StringDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BoolDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NaturalLanguageDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ImageDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MIDDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*URLDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TimeDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TimeOfDayDomain); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[21].Exporter = 
func(v interface{}, i int) interface{} { - switch v := v.(*FeaturePresence); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeaturePresenceWithinGroup); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InfinityNorm); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureComparator); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TensorRepresentation); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TensorRepresentationGroup); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FixedShape_Dim); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SparseFeature_IndexFeature); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SparseFeature_ValueFeature); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TensorRepresentation_DefaultValue); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TensorRepresentation_DenseTensor); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TensorRepresentation_VarLenSparseTensor); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := 
v.(*TensorRepresentation_SparseTensor); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[1].OneofWrappers = []interface{}{ - (*Feature_Presence)(nil), - (*Feature_GroupPresence)(nil), - (*Feature_Shape)(nil), - (*Feature_ValueCount)(nil), - (*Feature_Domain)(nil), - (*Feature_IntDomain)(nil), - (*Feature_FloatDomain)(nil), - (*Feature_StringDomain)(nil), - (*Feature_BoolDomain)(nil), - (*Feature_StructDomain)(nil), - (*Feature_NaturalLanguageDomain)(nil), - (*Feature_ImageDomain)(nil), - (*Feature_MidDomain)(nil), - (*Feature_UrlDomain)(nil), - (*Feature_TimeDomain)(nil), - (*Feature_TimeOfDayDomain)(nil), - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[19].OneofWrappers = []interface{}{ - (*TimeDomain_StringFormat)(nil), - (*TimeDomain_IntegerFormat)(nil), - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[20].OneofWrappers = []interface{}{ - (*TimeOfDayDomain_StringFormat)(nil), - (*TimeOfDayDomain_IntegerFormat)(nil), - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[25].OneofWrappers = []interface{}{ - (*TensorRepresentation_DenseTensor_)(nil), - (*TensorRepresentation_VarlenSparseTensor)(nil), - (*TensorRepresentation_SparseTensor_)(nil), - } - file_tensorflow_metadata_proto_v0_schema_proto_msgTypes[31].OneofWrappers = []interface{}{ - (*TensorRepresentation_DefaultValue_FloatValue)(nil), - (*TensorRepresentation_DefaultValue_IntValue)(nil), - (*TensorRepresentation_DefaultValue_BytesValue)(nil), - (*TensorRepresentation_DefaultValue_UintValue)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_tensorflow_metadata_proto_v0_schema_proto_rawDesc, - NumEnums: 4, - NumMessages: 36, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_tensorflow_metadata_proto_v0_schema_proto_goTypes, - DependencyIndexes: file_tensorflow_metadata_proto_v0_schema_proto_depIdxs, - EnumInfos: file_tensorflow_metadata_proto_v0_schema_proto_enumTypes, - MessageInfos: file_tensorflow_metadata_proto_v0_schema_proto_msgTypes, - }.Build() - File_tensorflow_metadata_proto_v0_schema_proto = out.File - file_tensorflow_metadata_proto_v0_schema_proto_rawDesc = nil - file_tensorflow_metadata_proto_v0_schema_proto_goTypes = nil - file_tensorflow_metadata_proto_v0_schema_proto_depIdxs = nil -} diff --git a/sdk/go/protos/tensorflow_metadata/proto/v0/statistics.pb.go b/sdk/go/protos/tensorflow_metadata/proto/v0/statistics.pb.go deleted file mode 100644 index fbf6247a1d..0000000000 --- a/sdk/go/protos/tensorflow_metadata/proto/v0/statistics.pb.go +++ /dev/null @@ -1,3120 +0,0 @@ -// Copyright 2017 The TensorFlow Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// ============================================================================= - -// Definitions for aggregated feature statistics for datasets. -// TODO(b/80075690): make a Javascript build rule for this. -// TODO(b/80075691): migrate Facets to use this. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.25.0 -// protoc v3.12.4 -// source: tensorflow_metadata/proto/v0/statistics.proto - -package v0 - -import ( - proto "github.com/golang/protobuf/proto" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// This is a compile-time assertion that a sufficiently up-to-date version -// of the legacy proto package is being used. -const _ = proto.ProtoPackageIsVersion4 - -// The types supported by the feature statistics. When aggregating -// tf.Examples, if the bytelist contains a string, it is recommended to encode -// it here as STRING instead of BYTES in order to calculate string-specific -// statistical measures. -type FeatureNameStatistics_Type int32 - -const ( - FeatureNameStatistics_INT FeatureNameStatistics_Type = 0 - FeatureNameStatistics_FLOAT FeatureNameStatistics_Type = 1 - FeatureNameStatistics_STRING FeatureNameStatistics_Type = 2 - FeatureNameStatistics_BYTES FeatureNameStatistics_Type = 3 - FeatureNameStatistics_STRUCT FeatureNameStatistics_Type = 4 -) - -// Enum value maps for FeatureNameStatistics_Type. -var ( - FeatureNameStatistics_Type_name = map[int32]string{ - 0: "INT", - 1: "FLOAT", - 2: "STRING", - 3: "BYTES", - 4: "STRUCT", - } - FeatureNameStatistics_Type_value = map[string]int32{ - "INT": 0, - "FLOAT": 1, - "STRING": 2, - "BYTES": 3, - "STRUCT": 4, - } -) - -func (x FeatureNameStatistics_Type) Enum() *FeatureNameStatistics_Type { - p := new(FeatureNameStatistics_Type) - *p = x - return p -} - -func (x FeatureNameStatistics_Type) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (FeatureNameStatistics_Type) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes[0].Descriptor() -} - -func (FeatureNameStatistics_Type) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes[0] -} - -func (x FeatureNameStatistics_Type) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use FeatureNameStatistics_Type.Descriptor instead. -func (FeatureNameStatistics_Type) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{7, 0} -} - -// The type of the histogram. A standard histogram has equal-width buckets. -// The quantiles type is used for when the histogram message is used to store -// quantile information (by using equal-count buckets with variable widths). -type Histogram_HistogramType int32 - -const ( - Histogram_STANDARD Histogram_HistogramType = 0 - Histogram_QUANTILES Histogram_HistogramType = 1 -) - -// Enum value maps for Histogram_HistogramType. 
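For reference, the enum boilerplate deleted above follows protoc-gen-go's standard shape: an int32-backed named type, parallel name/value lookup maps (as in FeatureNameStatistics_Type_name and FeatureNameStatistics_Type_value), and an Enum() helper that returns a pointer. A minimal self-contained sketch of that pattern, using a hypothetical SketchType rather than the generated names:

```go
package main

import "fmt"

// Sketch of the protoc-gen-go enum pattern seen in the deleted file:
// an int32-backed type plus parallel name/value lookup maps, mirroring
// FeatureNameStatistics_Type_name and FeatureNameStatistics_Type_value.
type SketchType int32

const (
	SketchType_INT    SketchType = 0
	SketchType_FLOAT  SketchType = 1
	SketchType_STRING SketchType = 2
)

var SketchType_name = map[int32]string{0: "INT", 1: "FLOAT", 2: "STRING"}
var SketchType_value = map[string]int32{"INT": 0, "FLOAT": 1, "STRING": 2}

// Enum mirrors the generated Enum() helper: it returns a pointer, which
// proto2-style optional fields use to distinguish "unset" from zero.
func (x SketchType) Enum() *SketchType {
	p := new(SketchType)
	*p = x
	return p
}

func main() {
	t := SketchType_STRING
	fmt.Println(SketchType_name[int32(t)]) // STRING
	fmt.Println(*t.Enum() == t)            // true
}
```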
-var ( - Histogram_HistogramType_name = map[int32]string{ - 0: "STANDARD", - 1: "QUANTILES", - } - Histogram_HistogramType_value = map[string]int32{ - "STANDARD": 0, - "QUANTILES": 1, - } -) - -func (x Histogram_HistogramType) Enum() *Histogram_HistogramType { - p := new(Histogram_HistogramType) - *p = x - return p -} - -func (x Histogram_HistogramType) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (Histogram_HistogramType) Descriptor() protoreflect.EnumDescriptor { - return file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes[1].Descriptor() -} - -func (Histogram_HistogramType) Type() protoreflect.EnumType { - return &file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes[1] -} - -func (x Histogram_HistogramType) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use Histogram_HistogramType.Descriptor instead. -func (Histogram_HistogramType) EnumDescriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{17, 0} -} - -// A list of features statistics for different datasets. If you wish to compare -// different datasets using this list, then the DatasetFeatureStatistics -// entries should all contain the same list of features. -type DatasetFeatureStatisticsList struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Datasets []*DatasetFeatureStatistics `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"` -} - -func (x *DatasetFeatureStatisticsList) Reset() { - *x = DatasetFeatureStatisticsList{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DatasetFeatureStatisticsList) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DatasetFeatureStatisticsList) ProtoMessage() {} - -func (x *DatasetFeatureStatisticsList) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DatasetFeatureStatisticsList.ProtoReflect.Descriptor instead. -func (*DatasetFeatureStatisticsList) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{0} -} - -func (x *DatasetFeatureStatisticsList) GetDatasets() []*DatasetFeatureStatistics { - if x != nil { - return x.Datasets - } - return nil -} - -// The feature statistics for a single dataset. -type DatasetFeatureStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The name of the dataset. - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // The number of examples in the dataset. - NumExamples uint64 `protobuf:"varint,2,opt,name=num_examples,json=numExamples,proto3" json:"num_examples,omitempty"` - // Only valid if the weight feature was specified. - // Treats a missing weighted feature as zero. - WeightedNumExamples float64 `protobuf:"fixed64,4,opt,name=weighted_num_examples,json=weightedNumExamples,proto3" json:"weighted_num_examples,omitempty"` - // The feature statistics for the dataset. 
- Features []*FeatureNameStatistics `protobuf:"bytes,3,rep,name=features,proto3" json:"features,omitempty"` - // Cross feature statistics for the dataset. - CrossFeatures []*CrossFeatureStatistics `protobuf:"bytes,5,rep,name=cross_features,json=crossFeatures,proto3" json:"cross_features,omitempty"` -} - -func (x *DatasetFeatureStatistics) Reset() { - *x = DatasetFeatureStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DatasetFeatureStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DatasetFeatureStatistics) ProtoMessage() {} - -func (x *DatasetFeatureStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DatasetFeatureStatistics.ProtoReflect.Descriptor instead. -func (*DatasetFeatureStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{1} -} - -func (x *DatasetFeatureStatistics) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *DatasetFeatureStatistics) GetNumExamples() uint64 { - if x != nil { - return x.NumExamples - } - return 0 -} - -func (x *DatasetFeatureStatistics) GetWeightedNumExamples() float64 { - if x != nil { - return x.WeightedNumExamples - } - return 0 -} - -func (x *DatasetFeatureStatistics) GetFeatures() []*FeatureNameStatistics { - if x != nil { - return x.Features - } - return nil -} - -func (x *DatasetFeatureStatistics) GetCrossFeatures() []*CrossFeatureStatistics { - if x != nil { - return x.CrossFeatures - } - return nil -} - -type CrossFeatureStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The path of feature x. - PathX *Path `protobuf:"bytes,1,opt,name=path_x,json=pathX,proto3" json:"path_x,omitempty"` - // The path of feature y. - PathY *Path `protobuf:"bytes,2,opt,name=path_y,json=pathY,proto3" json:"path_y,omitempty"` - // Number of occurrences of this feature cross in the data. If any of - // the features in the cross is missing, the example is ignored. 
- Count uint64 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"` - // Types that are assignable to CrossStats: - // *CrossFeatureStatistics_NumCrossStats - // *CrossFeatureStatistics_CategoricalCrossStats - CrossStats isCrossFeatureStatistics_CrossStats `protobuf_oneof:"cross_stats"` -} - -func (x *CrossFeatureStatistics) Reset() { - *x = CrossFeatureStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CrossFeatureStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CrossFeatureStatistics) ProtoMessage() {} - -func (x *CrossFeatureStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CrossFeatureStatistics.ProtoReflect.Descriptor instead. -func (*CrossFeatureStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{2} -} - -func (x *CrossFeatureStatistics) GetPathX() *Path { - if x != nil { - return x.PathX - } - return nil -} - -func (x *CrossFeatureStatistics) GetPathY() *Path { - if x != nil { - return x.PathY - } - return nil -} - -func (x *CrossFeatureStatistics) GetCount() uint64 { - if x != nil { - return x.Count - } - return 0 -} - -func (m *CrossFeatureStatistics) GetCrossStats() isCrossFeatureStatistics_CrossStats { - if m != nil { - return m.CrossStats - } - return nil -} - -func (x *CrossFeatureStatistics) GetNumCrossStats() *NumericCrossStatistics { - if x, ok := x.GetCrossStats().(*CrossFeatureStatistics_NumCrossStats); ok { - return x.NumCrossStats - } - return nil -} - -func (x *CrossFeatureStatistics) GetCategoricalCrossStats() *CategoricalCrossStatistics { - if x, ok := x.GetCrossStats().(*CrossFeatureStatistics_CategoricalCrossStats); ok { - return x.CategoricalCrossStats - } - return nil -} - -type isCrossFeatureStatistics_CrossStats interface { - isCrossFeatureStatistics_CrossStats() -} - -type CrossFeatureStatistics_NumCrossStats struct { - NumCrossStats *NumericCrossStatistics `protobuf:"bytes,4,opt,name=num_cross_stats,json=numCrossStats,proto3,oneof"` -} - -type CrossFeatureStatistics_CategoricalCrossStats struct { - CategoricalCrossStats *CategoricalCrossStatistics `protobuf:"bytes,5,opt,name=categorical_cross_stats,json=categoricalCrossStats,proto3,oneof"` -} - -func (*CrossFeatureStatistics_NumCrossStats) isCrossFeatureStatistics_CrossStats() {} - -func (*CrossFeatureStatistics_CategoricalCrossStats) isCrossFeatureStatistics_CrossStats() {} - -type NumericCrossStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Pearson product-moment correlation coefficient. - Correlation float32 `protobuf:"fixed32,1,opt,name=correlation,proto3" json:"correlation,omitempty"` - // Standard covariance. 
E[(X-E[X])*(Y-E[Y])] - Covariance float32 `protobuf:"fixed32,2,opt,name=covariance,proto3" json:"covariance,omitempty"` -} - -func (x *NumericCrossStatistics) Reset() { - *x = NumericCrossStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *NumericCrossStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*NumericCrossStatistics) ProtoMessage() {} - -func (x *NumericCrossStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use NumericCrossStatistics.ProtoReflect.Descriptor instead. -func (*NumericCrossStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{3} -} - -func (x *NumericCrossStatistics) GetCorrelation() float32 { - if x != nil { - return x.Correlation - } - return 0 -} - -func (x *NumericCrossStatistics) GetCovariance() float32 { - if x != nil { - return x.Covariance - } - return 0 -} - -type CategoricalCrossStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Lift *LiftStatistics `protobuf:"bytes,1,opt,name=lift,proto3" json:"lift,omitempty"` -} - -func (x *CategoricalCrossStatistics) Reset() { - *x = CategoricalCrossStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CategoricalCrossStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CategoricalCrossStatistics) ProtoMessage() {} - -func (x *CategoricalCrossStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CategoricalCrossStatistics.ProtoReflect.Descriptor instead. -func (*CategoricalCrossStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{4} -} - -func (x *CategoricalCrossStatistics) GetLift() *LiftStatistics { - if x != nil { - return x.Lift - } - return nil -} - -type LiftStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Lift information for each value of path_y. Lift is defined for each pair of - // values (x,y) as P(path_y=y|path_x=x)/P(path_y=y). - LiftSeries []*LiftSeries `protobuf:"bytes,1,rep,name=lift_series,json=liftSeries,proto3" json:"lift_series,omitempty"` - // Weighted lift information for each value of path_y. Weighted lift is - // defined for each pair of values (x,y) as P(path_y=y|path_x=x)/P(path_y=y) - // where probabilities are computed over weighted example space. 
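The NumericCrossStatistics and LiftStatistics messages deleted above carry their definitions in comments: Pearson product-moment correlation, covariance E[(X-E[X])*(Y-E[Y])], and lift(x, y) = P(path_y=y|path_x=x)/P(path_y=y). A minimal sketch of those three formulas over hypothetical sample data, for reference only:

```go
package main

import (
	"fmt"
	"math"
)

// mean of a sample; helper for the formulas below.
func mean(xs []float64) float64 {
	var s float64
	for _, x := range xs {
		s += x
	}
	return s / float64(len(xs))
}

// covariance implements E[(X-E[X])*(Y-E[Y])] from the
// NumericCrossStatistics comment above.
func covariance(xs, ys []float64) float64 {
	mx, my := mean(xs), mean(ys)
	var s float64
	for i := range xs {
		s += (xs[i] - mx) * (ys[i] - my)
	}
	return s / float64(len(xs))
}

// correlation is the Pearson product-moment coefficient:
// cov(X, Y) / (stddev(X) * stddev(Y)).
func correlation(xs, ys []float64) float64 {
	return covariance(xs, ys) /
		(math.Sqrt(covariance(xs, xs)) * math.Sqrt(covariance(ys, ys)))
}

// lift implements P(y|x) / P(y) from the LiftStatistics comment,
// estimated from hypothetical co-occurrence counts.
func lift(countXY, countX, countY, total float64) float64 {
	return (countXY / countX) / (countY / total)
}

func main() {
	xs := []float64{1, 2, 3, 4}
	ys := []float64{2, 4, 6, 8}
	fmt.Println(covariance(xs, ys))  // 2.5
	fmt.Println(correlation(xs, ys)) // 1 (up to float rounding)
	// y in 200 of 1000 examples; among 100 with x, y appears 40 times.
	fmt.Println(lift(40, 100, 200, 1000)) // 2
}
```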
- WeightedLiftSeries []*LiftSeries `protobuf:"bytes,2,rep,name=weighted_lift_series,json=weightedLiftSeries,proto3" json:"weighted_lift_series,omitempty"` -} - -func (x *LiftStatistics) Reset() { - *x = LiftStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LiftStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LiftStatistics) ProtoMessage() {} - -func (x *LiftStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LiftStatistics.ProtoReflect.Descriptor instead. -func (*LiftStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{5} -} - -func (x *LiftStatistics) GetLiftSeries() []*LiftSeries { - if x != nil { - return x.LiftSeries - } - return nil -} - -func (x *LiftStatistics) GetWeightedLiftSeries() []*LiftSeries { - if x != nil { - return x.WeightedLiftSeries - } - return nil -} - -// Container for lift information for a specific y-value. -type LiftSeries struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The particular value of path_y corresponding to this LiftSeries. Each - // element in lift_values corresponds to the lift a different x_value and - // this specific y_value. - // - // Types that are assignable to YValue: - // *LiftSeries_YInt - // *LiftSeries_YString - // *LiftSeries_YBucket - YValue isLiftSeries_YValue `protobuf_oneof:"y_value"` - // The number of examples in which y_value appears. - // - // Types that are assignable to YCountValue: - // *LiftSeries_YCount - // *LiftSeries_WeightedYCount - YCountValue isLiftSeries_YCountValue `protobuf_oneof:"y_count_value"` - // The lifts for a each path_x value and this y_value. - LiftValues []*LiftSeries_LiftValue `protobuf:"bytes,6,rep,name=lift_values,json=liftValues,proto3" json:"lift_values,omitempty"` -} - -func (x *LiftSeries) Reset() { - *x = LiftSeries{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LiftSeries) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LiftSeries) ProtoMessage() {} - -func (x *LiftSeries) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LiftSeries.ProtoReflect.Descriptor instead. 
-func (*LiftSeries) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{6} -} - -func (m *LiftSeries) GetYValue() isLiftSeries_YValue { - if m != nil { - return m.YValue - } - return nil -} - -func (x *LiftSeries) GetYInt() int32 { - if x, ok := x.GetYValue().(*LiftSeries_YInt); ok { - return x.YInt - } - return 0 -} - -func (x *LiftSeries) GetYString() string { - if x, ok := x.GetYValue().(*LiftSeries_YString); ok { - return x.YString - } - return "" -} - -func (x *LiftSeries) GetYBucket() *LiftSeries_Bucket { - if x, ok := x.GetYValue().(*LiftSeries_YBucket); ok { - return x.YBucket - } - return nil -} - -func (m *LiftSeries) GetYCountValue() isLiftSeries_YCountValue { - if m != nil { - return m.YCountValue - } - return nil -} - -func (x *LiftSeries) GetYCount() uint64 { - if x, ok := x.GetYCountValue().(*LiftSeries_YCount); ok { - return x.YCount - } - return 0 -} - -func (x *LiftSeries) GetWeightedYCount() float64 { - if x, ok := x.GetYCountValue().(*LiftSeries_WeightedYCount); ok { - return x.WeightedYCount - } - return 0 -} - -func (x *LiftSeries) GetLiftValues() []*LiftSeries_LiftValue { - if x != nil { - return x.LiftValues - } - return nil -} - -type isLiftSeries_YValue interface { - isLiftSeries_YValue() -} - -type LiftSeries_YInt struct { - YInt int32 `protobuf:"varint,1,opt,name=y_int,json=yInt,proto3,oneof"` -} - -type LiftSeries_YString struct { - YString string `protobuf:"bytes,2,opt,name=y_string,json=yString,proto3,oneof"` -} - -type LiftSeries_YBucket struct { - YBucket *LiftSeries_Bucket `protobuf:"bytes,3,opt,name=y_bucket,json=yBucket,proto3,oneof"` -} - -func (*LiftSeries_YInt) isLiftSeries_YValue() {} - -func (*LiftSeries_YString) isLiftSeries_YValue() {} - -func (*LiftSeries_YBucket) isLiftSeries_YValue() {} - -type isLiftSeries_YCountValue interface { - isLiftSeries_YCountValue() -} - -type LiftSeries_YCount struct { - YCount uint64 `protobuf:"varint,4,opt,name=y_count,json=yCount,proto3,oneof"` -} - -type LiftSeries_WeightedYCount struct { - WeightedYCount float64 `protobuf:"fixed64,5,opt,name=weighted_y_count,json=weightedYCount,proto3,oneof"` -} - -func (*LiftSeries_YCount) isLiftSeries_YCountValue() {} - -func (*LiftSeries_WeightedYCount) isLiftSeries_YCountValue() {} - -// The complete set of statistics for a given feature name for a dataset. -type FeatureNameStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // One can identify a field either by the name (for simple fields), or by - // a path (for structured fields). Note that: - // name: "foo" - // is equivalent to: - // path: {step:"foo"} - // Note: this oneof must be consistently either name or path across all - // FeatureNameStatistics in one DatasetFeatureStatistics. - // - // Types that are assignable to FieldId: - // *FeatureNameStatistics_Name - // *FeatureNameStatistics_Path - FieldId isFeatureNameStatistics_FieldId `protobuf_oneof:"field_id"` - // The data type of the feature - Type FeatureNameStatistics_Type `protobuf:"varint,2,opt,name=type,proto3,enum=tensorflow.metadata.v0.FeatureNameStatistics_Type" json:"type,omitempty"` - // The statistics of the values of the feature. 
- // - // Types that are assignable to Stats: - // *FeatureNameStatistics_NumStats - // *FeatureNameStatistics_StringStats - // *FeatureNameStatistics_BytesStats - // *FeatureNameStatistics_StructStats - Stats isFeatureNameStatistics_Stats `protobuf_oneof:"stats"` - // Any custom statistics can be stored in this list. - CustomStats []*CustomStatistic `protobuf:"bytes,6,rep,name=custom_stats,json=customStats,proto3" json:"custom_stats,omitempty"` -} - -func (x *FeatureNameStatistics) Reset() { - *x = FeatureNameStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FeatureNameStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FeatureNameStatistics) ProtoMessage() {} - -func (x *FeatureNameStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FeatureNameStatistics.ProtoReflect.Descriptor instead. -func (*FeatureNameStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{7} -} - -func (m *FeatureNameStatistics) GetFieldId() isFeatureNameStatistics_FieldId { - if m != nil { - return m.FieldId - } - return nil -} - -func (x *FeatureNameStatistics) GetName() string { - if x, ok := x.GetFieldId().(*FeatureNameStatistics_Name); ok { - return x.Name - } - return "" -} - -func (x *FeatureNameStatistics) GetPath() *Path { - if x, ok := x.GetFieldId().(*FeatureNameStatistics_Path); ok { - return x.Path - } - return nil -} - -func (x *FeatureNameStatistics) GetType() FeatureNameStatistics_Type { - if x != nil { - return x.Type - } - return FeatureNameStatistics_INT -} - -func (m *FeatureNameStatistics) GetStats() isFeatureNameStatistics_Stats { - if m != nil { - return m.Stats - } - return nil -} - -func (x *FeatureNameStatistics) GetNumStats() *NumericStatistics { - if x, ok := x.GetStats().(*FeatureNameStatistics_NumStats); ok { - return x.NumStats - } - return nil -} - -func (x *FeatureNameStatistics) GetStringStats() *StringStatistics { - if x, ok := x.GetStats().(*FeatureNameStatistics_StringStats); ok { - return x.StringStats - } - return nil -} - -func (x *FeatureNameStatistics) GetBytesStats() *BytesStatistics { - if x, ok := x.GetStats().(*FeatureNameStatistics_BytesStats); ok { - return x.BytesStats - } - return nil -} - -func (x *FeatureNameStatistics) GetStructStats() *StructStatistics { - if x, ok := x.GetStats().(*FeatureNameStatistics_StructStats); ok { - return x.StructStats - } - return nil -} - -func (x *FeatureNameStatistics) GetCustomStats() []*CustomStatistic { - if x != nil { - return x.CustomStats - } - return nil -} - -type isFeatureNameStatistics_FieldId interface { - isFeatureNameStatistics_FieldId() -} - -type FeatureNameStatistics_Name struct { - // The feature name - Name string `protobuf:"bytes,1,opt,name=name,proto3,oneof"` -} - -type FeatureNameStatistics_Path struct { - // The path of the feature. 
- Path *Path `protobuf:"bytes,8,opt,name=path,proto3,oneof"` -} - -func (*FeatureNameStatistics_Name) isFeatureNameStatistics_FieldId() {} - -func (*FeatureNameStatistics_Path) isFeatureNameStatistics_FieldId() {} - -type isFeatureNameStatistics_Stats interface { - isFeatureNameStatistics_Stats() -} - -type FeatureNameStatistics_NumStats struct { - NumStats *NumericStatistics `protobuf:"bytes,3,opt,name=num_stats,json=numStats,proto3,oneof"` -} - -type FeatureNameStatistics_StringStats struct { - StringStats *StringStatistics `protobuf:"bytes,4,opt,name=string_stats,json=stringStats,proto3,oneof"` -} - -type FeatureNameStatistics_BytesStats struct { - BytesStats *BytesStatistics `protobuf:"bytes,5,opt,name=bytes_stats,json=bytesStats,proto3,oneof"` -} - -type FeatureNameStatistics_StructStats struct { - StructStats *StructStatistics `protobuf:"bytes,7,opt,name=struct_stats,json=structStats,proto3,oneof"` -} - -func (*FeatureNameStatistics_NumStats) isFeatureNameStatistics_Stats() {} - -func (*FeatureNameStatistics_StringStats) isFeatureNameStatistics_Stats() {} - -func (*FeatureNameStatistics_BytesStats) isFeatureNameStatistics_Stats() {} - -func (*FeatureNameStatistics_StructStats) isFeatureNameStatistics_Stats() {} - -// Common weighted statistics for all feature types. Statistics counting number -// of values (i.e., avg_num_values and tot_num_values) include NaNs. -// If the weighted column is missing, then this counts as a weight of 1 -// for that example. -type WeightedCommonStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Weighted number of examples not missing. - NumNonMissing float64 `protobuf:"fixed64,1,opt,name=num_non_missing,json=numNonMissing,proto3" json:"num_non_missing,omitempty"` - // Weighted number of examples missing. - // Note that if the weighted column is zero, this does not count - // as missing. - NumMissing float64 `protobuf:"fixed64,2,opt,name=num_missing,json=numMissing,proto3" json:"num_missing,omitempty"` - // average number of values, weighted by the number of examples. - AvgNumValues float64 `protobuf:"fixed64,3,opt,name=avg_num_values,json=avgNumValues,proto3" json:"avg_num_values,omitempty"` - // tot_num_values = avg_num_values * num_non_missing. - // This is calculated directly, so should have less numerical error. - TotNumValues float64 `protobuf:"fixed64,4,opt,name=tot_num_values,json=totNumValues,proto3" json:"tot_num_values,omitempty"` -} - -func (x *WeightedCommonStatistics) Reset() { - *x = WeightedCommonStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *WeightedCommonStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*WeightedCommonStatistics) ProtoMessage() {} - -func (x *WeightedCommonStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use WeightedCommonStatistics.ProtoReflect.Descriptor instead. 
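The WeightedCommonStatistics comments above pin down how the weighted counters relate: a missing weight column counts as weight 1, and tot_num_values = avg_num_values * num_non_missing. A self-contained sketch of that bookkeeping over hypothetical examples (treating a zero value count as a missing example is a sketch assumption, not part of the proto contract):

```go
package main

import "fmt"

func main() {
	// Hypothetical per-example weights and per-example value counts;
	// for this sketch an example with zero values counts as missing.
	weights := []float64{1, 2, 1, 1}
	valueCounts := []float64{3, 4, 2, 0}

	var numNonMissing, numMissing, totNumValues float64
	for i, c := range valueCounts {
		if c == 0 {
			numMissing += weights[i] // weighted count of missing examples
			continue
		}
		numNonMissing += weights[i]
		totNumValues += weights[i] * c
	}
	// avg_num_values is defined so that the documented identity
	// tot_num_values = avg_num_values * num_non_missing holds.
	avgNumValues := totNumValues / numNonMissing

	fmt.Println(numNonMissing, numMissing)                // 4 1
	fmt.Println(totNumValues, avgNumValues*numNonMissing) // 13 13
}
```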
-func (*WeightedCommonStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{8} -} - -func (x *WeightedCommonStatistics) GetNumNonMissing() float64 { - if x != nil { - return x.NumNonMissing - } - return 0 -} - -func (x *WeightedCommonStatistics) GetNumMissing() float64 { - if x != nil { - return x.NumMissing - } - return 0 -} - -func (x *WeightedCommonStatistics) GetAvgNumValues() float64 { - if x != nil { - return x.AvgNumValues - } - return 0 -} - -func (x *WeightedCommonStatistics) GetTotNumValues() float64 { - if x != nil { - return x.TotNumValues - } - return 0 -} - -// Stores the name and value of any custom statistic. The value can be a string, -// double, or histogram. -type CustomStatistic struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Types that are assignable to Val: - // *CustomStatistic_Num - // *CustomStatistic_Str - // *CustomStatistic_Histogram - // *CustomStatistic_RankHistogram - Val isCustomStatistic_Val `protobuf_oneof:"val"` -} - -func (x *CustomStatistic) Reset() { - *x = CustomStatistic{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CustomStatistic) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CustomStatistic) ProtoMessage() {} - -func (x *CustomStatistic) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CustomStatistic.ProtoReflect.Descriptor instead. 
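CustomStatistic pairs a free-form name with a val oneof (number, string, histogram, or rank histogram). A hedged sketch of appending a numeric custom stat, under the same assumed import as above; the statistic name and value are illustrative only:

package main

import (
	statspb "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" // assumed import path
)

// addNumStat appends a numeric CustomStatistic to a feature's custom_stats
// list using the generated oneof wrapper CustomStatistic_Num (defined later
// in this file).
func addNumStat(f *statspb.FeatureNameStatistics, name string, v float64) {
	f.CustomStats = append(f.CustomStats, &statspb.CustomStatistic{
		Name: name,
		Val:  &statspb.CustomStatistic_Num{Num: v},
	})
}

func main() {
	f := &statspb.FeatureNameStatistics{}
	addNumStat(f, "null_fraction", 0.02) // illustrative name and value
}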
-func (*CustomStatistic) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{9} -} - -func (x *CustomStatistic) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (m *CustomStatistic) GetVal() isCustomStatistic_Val { - if m != nil { - return m.Val - } - return nil -} - -func (x *CustomStatistic) GetNum() float64 { - if x, ok := x.GetVal().(*CustomStatistic_Num); ok { - return x.Num - } - return 0 -} - -func (x *CustomStatistic) GetStr() string { - if x, ok := x.GetVal().(*CustomStatistic_Str); ok { - return x.Str - } - return "" -} - -func (x *CustomStatistic) GetHistogram() *Histogram { - if x, ok := x.GetVal().(*CustomStatistic_Histogram); ok { - return x.Histogram - } - return nil -} - -func (x *CustomStatistic) GetRankHistogram() *RankHistogram { - if x, ok := x.GetVal().(*CustomStatistic_RankHistogram); ok { - return x.RankHistogram - } - return nil -} - -type isCustomStatistic_Val interface { - isCustomStatistic_Val() -} - -type CustomStatistic_Num struct { - Num float64 `protobuf:"fixed64,2,opt,name=num,proto3,oneof"` -} - -type CustomStatistic_Str struct { - Str string `protobuf:"bytes,3,opt,name=str,proto3,oneof"` -} - -type CustomStatistic_Histogram struct { - Histogram *Histogram `protobuf:"bytes,4,opt,name=histogram,proto3,oneof"` -} - -type CustomStatistic_RankHistogram struct { - RankHistogram *RankHistogram `protobuf:"bytes,5,opt,name=rank_histogram,json=rankHistogram,proto3,oneof"` -} - -func (*CustomStatistic_Num) isCustomStatistic_Val() {} - -func (*CustomStatistic_Str) isCustomStatistic_Val() {} - -func (*CustomStatistic_Histogram) isCustomStatistic_Val() {} - -func (*CustomStatistic_RankHistogram) isCustomStatistic_Val() {} - -// Statistics for a numeric feature in a dataset. -type NumericStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CommonStats *CommonStatistics `protobuf:"bytes,1,opt,name=common_stats,json=commonStats,proto3" json:"common_stats,omitempty"` - // The mean of the values - Mean float64 `protobuf:"fixed64,2,opt,name=mean,proto3" json:"mean,omitempty"` - // The standard deviation of the values - StdDev float64 `protobuf:"fixed64,3,opt,name=std_dev,json=stdDev,proto3" json:"std_dev,omitempty"` - // The number of values that equal 0 - NumZeros uint64 `protobuf:"varint,4,opt,name=num_zeros,json=numZeros,proto3" json:"num_zeros,omitempty"` - // The minimum value - Min float64 `protobuf:"fixed64,5,opt,name=min,proto3" json:"min,omitempty"` - // The median value - Median float64 `protobuf:"fixed64,6,opt,name=median,proto3" json:"median,omitempty"` - // The maximum value - Max float64 `protobuf:"fixed64,7,opt,name=max,proto3" json:"max,omitempty"` - // The histogram(s) of the feature values. - Histograms []*Histogram `protobuf:"bytes,8,rep,name=histograms,proto3" json:"histograms,omitempty"` - // Weighted statistics for the feature, if the values have weights. 
- WeightedNumericStats *WeightedNumericStatistics `protobuf:"bytes,9,opt,name=weighted_numeric_stats,json=weightedNumericStats,proto3" json:"weighted_numeric_stats,omitempty"` -} - -func (x *NumericStatistics) Reset() { - *x = NumericStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *NumericStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*NumericStatistics) ProtoMessage() {} - -func (x *NumericStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use NumericStatistics.ProtoReflect.Descriptor instead. -func (*NumericStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{10} -} - -func (x *NumericStatistics) GetCommonStats() *CommonStatistics { - if x != nil { - return x.CommonStats - } - return nil -} - -func (x *NumericStatistics) GetMean() float64 { - if x != nil { - return x.Mean - } - return 0 -} - -func (x *NumericStatistics) GetStdDev() float64 { - if x != nil { - return x.StdDev - } - return 0 -} - -func (x *NumericStatistics) GetNumZeros() uint64 { - if x != nil { - return x.NumZeros - } - return 0 -} - -func (x *NumericStatistics) GetMin() float64 { - if x != nil { - return x.Min - } - return 0 -} - -func (x *NumericStatistics) GetMedian() float64 { - if x != nil { - return x.Median - } - return 0 -} - -func (x *NumericStatistics) GetMax() float64 { - if x != nil { - return x.Max - } - return 0 -} - -func (x *NumericStatistics) GetHistograms() []*Histogram { - if x != nil { - return x.Histograms - } - return nil -} - -func (x *NumericStatistics) GetWeightedNumericStats() *WeightedNumericStatistics { - if x != nil { - return x.WeightedNumericStats - } - return nil -} - -// Statistics for a string feature in a dataset. -type StringStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CommonStats *CommonStatistics `protobuf:"bytes,1,opt,name=common_stats,json=commonStats,proto3" json:"common_stats,omitempty"` - // The number of unique values - Unique uint64 `protobuf:"varint,2,opt,name=unique,proto3" json:"unique,omitempty"` - // A sorted list of the most-frequent values and their frequencies, with - // the most-frequent being first. - TopValues []*StringStatistics_FreqAndValue `protobuf:"bytes,3,rep,name=top_values,json=topValues,proto3" json:"top_values,omitempty"` - // The average length of the values - AvgLength float32 `protobuf:"fixed32,4,opt,name=avg_length,json=avgLength,proto3" json:"avg_length,omitempty"` - // The rank histogram for the values of the feature. - // The rank is used to measure of how commonly the value is found in the - // dataset. The most common value would have a rank of 1, with the second-most - // common value having a rank of 2, and so on. - RankHistogram *RankHistogram `protobuf:"bytes,5,opt,name=rank_histogram,json=rankHistogram,proto3" json:"rank_histogram,omitempty"` - // Weighted statistics for the feature, if the values have weights. 
- WeightedStringStats *WeightedStringStatistics `protobuf:"bytes,6,opt,name=weighted_string_stats,json=weightedStringStats,proto3" json:"weighted_string_stats,omitempty"` - // A vocabulary file, used for vocabularies too large to store in the proto - // itself. Note that the file may be relative to some context-dependent - // directory. E.g. in TFX the feature statistics will live in a PPP and - // vocabulary file names will be relative to this PPP. - VocabularyFile string `protobuf:"bytes,7,opt,name=vocabulary_file,json=vocabularyFile,proto3" json:"vocabulary_file,omitempty"` -} - -func (x *StringStatistics) Reset() { - *x = StringStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StringStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StringStatistics) ProtoMessage() {} - -func (x *StringStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StringStatistics.ProtoReflect.Descriptor instead. -func (*StringStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{11} -} - -func (x *StringStatistics) GetCommonStats() *CommonStatistics { - if x != nil { - return x.CommonStats - } - return nil -} - -func (x *StringStatistics) GetUnique() uint64 { - if x != nil { - return x.Unique - } - return 0 -} - -func (x *StringStatistics) GetTopValues() []*StringStatistics_FreqAndValue { - if x != nil { - return x.TopValues - } - return nil -} - -func (x *StringStatistics) GetAvgLength() float32 { - if x != nil { - return x.AvgLength - } - return 0 -} - -func (x *StringStatistics) GetRankHistogram() *RankHistogram { - if x != nil { - return x.RankHistogram - } - return nil -} - -func (x *StringStatistics) GetWeightedStringStats() *WeightedStringStatistics { - if x != nil { - return x.WeightedStringStats - } - return nil -} - -func (x *StringStatistics) GetVocabularyFile() string { - if x != nil { - return x.VocabularyFile - } - return "" -} - -// Statistics for a weighted numeric feature in a dataset. -type WeightedNumericStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The weighted mean of the values - Mean float64 `protobuf:"fixed64,1,opt,name=mean,proto3" json:"mean,omitempty"` - // The weighted standard deviation of the values - StdDev float64 `protobuf:"fixed64,2,opt,name=std_dev,json=stdDev,proto3" json:"std_dev,omitempty"` - // The weighted median of the values - Median float64 `protobuf:"fixed64,3,opt,name=median,proto3" json:"median,omitempty"` - // The histogram(s) of the weighted feature values. 
- Histograms []*Histogram `protobuf:"bytes,4,rep,name=histograms,proto3" json:"histograms,omitempty"` -} - -func (x *WeightedNumericStatistics) Reset() { - *x = WeightedNumericStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *WeightedNumericStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*WeightedNumericStatistics) ProtoMessage() {} - -func (x *WeightedNumericStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use WeightedNumericStatistics.ProtoReflect.Descriptor instead. -func (*WeightedNumericStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{12} -} - -func (x *WeightedNumericStatistics) GetMean() float64 { - if x != nil { - return x.Mean - } - return 0 -} - -func (x *WeightedNumericStatistics) GetStdDev() float64 { - if x != nil { - return x.StdDev - } - return 0 -} - -func (x *WeightedNumericStatistics) GetMedian() float64 { - if x != nil { - return x.Median - } - return 0 -} - -func (x *WeightedNumericStatistics) GetHistograms() []*Histogram { - if x != nil { - return x.Histograms - } - return nil -} - -// Statistics for a weighted string feature in a dataset. -type WeightedStringStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // A sorted list of the most-frequent values and their weighted frequencies, - // with the most-frequent being first. - TopValues []*StringStatistics_FreqAndValue `protobuf:"bytes,1,rep,name=top_values,json=topValues,proto3" json:"top_values,omitempty"` - // The rank histogram for the weighted values of the feature. - RankHistogram *RankHistogram `protobuf:"bytes,2,opt,name=rank_histogram,json=rankHistogram,proto3" json:"rank_histogram,omitempty"` -} - -func (x *WeightedStringStatistics) Reset() { - *x = WeightedStringStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *WeightedStringStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*WeightedStringStatistics) ProtoMessage() {} - -func (x *WeightedStringStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use WeightedStringStatistics.ProtoReflect.Descriptor instead. 
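Per the comments above, top_values on WeightedStringStatistics is already sorted most-frequent first, so its index doubles as a rank. A small sketch of walking that list, again under the assumed statspb import; the sample values are made up:

package main

import (
	"fmt"

	statspb "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" // assumed import path
)

// printTopValues walks the weighted top-values list, whose order the proto
// comments guarantee is most-frequent first.
func printTopValues(w *statspb.WeightedStringStatistics) {
	for rank, fv := range w.GetTopValues() {
		fmt.Printf("#%d %q (weighted frequency %.2f)\n", rank+1, fv.GetValue(), fv.GetFrequency())
	}
}

func main() {
	printTopValues(&statspb.WeightedStringStatistics{
		TopValues: []*statspb.StringStatistics_FreqAndValue{
			{Value: "US", Frequency: 120.5}, // illustrative data
			{Value: "GB", Frequency: 40.0},
		},
	})
}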
-func (*WeightedStringStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{13} -} - -func (x *WeightedStringStatistics) GetTopValues() []*StringStatistics_FreqAndValue { - if x != nil { - return x.TopValues - } - return nil -} - -func (x *WeightedStringStatistics) GetRankHistogram() *RankHistogram { - if x != nil { - return x.RankHistogram - } - return nil -} - -// Statistics for a bytes feature in a dataset. -type BytesStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CommonStats *CommonStatistics `protobuf:"bytes,1,opt,name=common_stats,json=commonStats,proto3" json:"common_stats,omitempty"` - // The number of unique values - Unique uint64 `protobuf:"varint,2,opt,name=unique,proto3" json:"unique,omitempty"` - // The average number of bytes in a value - AvgNumBytes float32 `protobuf:"fixed32,3,opt,name=avg_num_bytes,json=avgNumBytes,proto3" json:"avg_num_bytes,omitempty"` - // The minimum number of bytes in a value - MinNumBytes float32 `protobuf:"fixed32,4,opt,name=min_num_bytes,json=minNumBytes,proto3" json:"min_num_bytes,omitempty"` - // The maximum number of bytes in a value - MaxNumBytes float32 `protobuf:"fixed32,5,opt,name=max_num_bytes,json=maxNumBytes,proto3" json:"max_num_bytes,omitempty"` -} - -func (x *BytesStatistics) Reset() { - *x = BytesStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *BytesStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*BytesStatistics) ProtoMessage() {} - -func (x *BytesStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use BytesStatistics.ProtoReflect.Descriptor instead. 
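The WeightedCommonStatistics comments earlier in this file (and the CommonStatistics comments below) document the invariant tot_num_values = avg_num_values * num_non_missing, with tot_num_values computed directly to reduce numerical error. A hedged consistency check under the same assumed import; the tolerance is an arbitrary illustrative choice:

package main

import (
	"fmt"
	"math"

	statspb "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" // assumed import path
)

// checkTotals verifies the documented relationship
//   tot_num_values = avg_num_values * num_non_missing
// up to a small floating-point tolerance.
func checkTotals(w *statspb.WeightedCommonStatistics) bool {
	expected := w.GetAvgNumValues() * w.GetNumNonMissing()
	return math.Abs(expected-w.GetTotNumValues()) < 1e-6
}

func main() {
	fmt.Println(checkTotals(&statspb.WeightedCommonStatistics{
		NumNonMissing: 10, AvgNumValues: 2.5, TotNumValues: 25,
	})) // true
}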
-func (*BytesStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{14} -} - -func (x *BytesStatistics) GetCommonStats() *CommonStatistics { - if x != nil { - return x.CommonStats - } - return nil -} - -func (x *BytesStatistics) GetUnique() uint64 { - if x != nil { - return x.Unique - } - return 0 -} - -func (x *BytesStatistics) GetAvgNumBytes() float32 { - if x != nil { - return x.AvgNumBytes - } - return 0 -} - -func (x *BytesStatistics) GetMinNumBytes() float32 { - if x != nil { - return x.MinNumBytes - } - return 0 -} - -func (x *BytesStatistics) GetMaxNumBytes() float32 { - if x != nil { - return x.MaxNumBytes - } - return 0 -} - -type StructStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CommonStats *CommonStatistics `protobuf:"bytes,1,opt,name=common_stats,json=commonStats,proto3" json:"common_stats,omitempty"` -} - -func (x *StructStatistics) Reset() { - *x = StructStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StructStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StructStatistics) ProtoMessage() {} - -func (x *StructStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StructStatistics.ProtoReflect.Descriptor instead. -func (*StructStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{15} -} - -func (x *StructStatistics) GetCommonStats() *CommonStatistics { - if x != nil { - return x.CommonStats - } - return nil -} - -// Common statistics for all feature types. Statistics counting number of values -// (i.e., min_num_values, max_num_values, avg_num_values, and tot_num_values) -// include NaNs. -type CommonStatistics struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The number of examples with at least one value for this feature. - NumNonMissing uint64 `protobuf:"varint,1,opt,name=num_non_missing,json=numNonMissing,proto3" json:"num_non_missing,omitempty"` - // The number of examples with no values for this feature. - NumMissing uint64 `protobuf:"varint,2,opt,name=num_missing,json=numMissing,proto3" json:"num_missing,omitempty"` - // The minimum number of values in a single example for this feature. - MinNumValues uint64 `protobuf:"varint,3,opt,name=min_num_values,json=minNumValues,proto3" json:"min_num_values,omitempty"` - // The maximum number of values in a single example for this feature. - MaxNumValues uint64 `protobuf:"varint,4,opt,name=max_num_values,json=maxNumValues,proto3" json:"max_num_values,omitempty"` - // The average number of values in a single example for this feature. - AvgNumValues float32 `protobuf:"fixed32,5,opt,name=avg_num_values,json=avgNumValues,proto3" json:"avg_num_values,omitempty"` - // tot_num_values = avg_num_values * num_non_missing. - // This is calculated directly, so should have less numerical error. 
- TotNumValues uint64 `protobuf:"varint,8,opt,name=tot_num_values,json=totNumValues,proto3" json:"tot_num_values,omitempty"` - // The quantiles histogram for the number of values in this feature. - NumValuesHistogram *Histogram `protobuf:"bytes,6,opt,name=num_values_histogram,json=numValuesHistogram,proto3" json:"num_values_histogram,omitempty"` - WeightedCommonStats *WeightedCommonStatistics `protobuf:"bytes,7,opt,name=weighted_common_stats,json=weightedCommonStats,proto3" json:"weighted_common_stats,omitempty"` - // The histogram for the number of features in the feature list (only set if - // this feature is a non-context feature from a tf.SequenceExample). - // This is different from num_values_histogram, as num_values_histogram tracks - // the count of all values for a feature in an example, whereas this tracks - // the length of the feature list for this feature in an example (where each - // feature list can contain multiple values). - FeatureListLengthHistogram *Histogram `protobuf:"bytes,9,opt,name=feature_list_length_histogram,json=featureListLengthHistogram,proto3" json:"feature_list_length_histogram,omitempty"` -} - -func (x *CommonStatistics) Reset() { - *x = CommonStatistics{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CommonStatistics) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CommonStatistics) ProtoMessage() {} - -func (x *CommonStatistics) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CommonStatistics.ProtoReflect.Descriptor instead. -func (*CommonStatistics) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{16} -} - -func (x *CommonStatistics) GetNumNonMissing() uint64 { - if x != nil { - return x.NumNonMissing - } - return 0 -} - -func (x *CommonStatistics) GetNumMissing() uint64 { - if x != nil { - return x.NumMissing - } - return 0 -} - -func (x *CommonStatistics) GetMinNumValues() uint64 { - if x != nil { - return x.MinNumValues - } - return 0 -} - -func (x *CommonStatistics) GetMaxNumValues() uint64 { - if x != nil { - return x.MaxNumValues - } - return 0 -} - -func (x *CommonStatistics) GetAvgNumValues() float32 { - if x != nil { - return x.AvgNumValues - } - return 0 -} - -func (x *CommonStatistics) GetTotNumValues() uint64 { - if x != nil { - return x.TotNumValues - } - return 0 -} - -func (x *CommonStatistics) GetNumValuesHistogram() *Histogram { - if x != nil { - return x.NumValuesHistogram - } - return nil -} - -func (x *CommonStatistics) GetWeightedCommonStats() *WeightedCommonStatistics { - if x != nil { - return x.WeightedCommonStats - } - return nil -} - -func (x *CommonStatistics) GetFeatureListLengthHistogram() *Histogram { - if x != nil { - return x.FeatureListLengthHistogram - } - return nil -} - -// The data used to create a histogram of a numeric feature for a dataset. -type Histogram struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The number of NaN values in the dataset. 
- NumNan uint64 `protobuf:"varint,1,opt,name=num_nan,json=numNan,proto3" json:"num_nan,omitempty"` - // The number of undefined values in the dataset. - NumUndefined uint64 `protobuf:"varint,2,opt,name=num_undefined,json=numUndefined,proto3" json:"num_undefined,omitempty"` - // A list of buckets in the histogram, sorted from lowest bucket to highest - // bucket. - Buckets []*Histogram_Bucket `protobuf:"bytes,3,rep,name=buckets,proto3" json:"buckets,omitempty"` - // The type of the histogram. - Type Histogram_HistogramType `protobuf:"varint,4,opt,name=type,proto3,enum=tensorflow.metadata.v0.Histogram_HistogramType" json:"type,omitempty"` - // An optional descriptive name of the histogram, to be used for labeling. - Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *Histogram) Reset() { - *x = Histogram{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[17] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Histogram) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Histogram) ProtoMessage() {} - -func (x *Histogram) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[17] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Histogram.ProtoReflect.Descriptor instead. -func (*Histogram) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{17} -} - -func (x *Histogram) GetNumNan() uint64 { - if x != nil { - return x.NumNan - } - return 0 -} - -func (x *Histogram) GetNumUndefined() uint64 { - if x != nil { - return x.NumUndefined - } - return 0 -} - -func (x *Histogram) GetBuckets() []*Histogram_Bucket { - if x != nil { - return x.Buckets - } - return nil -} - -func (x *Histogram) GetType() Histogram_HistogramType { - if x != nil { - return x.Type - } - return Histogram_STANDARD -} - -func (x *Histogram) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -// The data used to create a rank histogram of a non-numeric feature of a -// dataset. The rank of a value in a feature can be used as a measure of how -// commonly the value is found in the entire dataset. With bucket sizes of one, -// this becomes a distribution function of all feature values. -type RankHistogram struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // A list of buckets in the histogram, sorted from lowest-ranked bucket to - // highest-ranked bucket. - Buckets []*RankHistogram_Bucket `protobuf:"bytes,1,rep,name=buckets,proto3" json:"buckets,omitempty"` - // An optional descriptive name of the histogram, to be used for labeling. 
- Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *RankHistogram) Reset() { - *x = RankHistogram{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[18] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RankHistogram) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RankHistogram) ProtoMessage() {} - -func (x *RankHistogram) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[18] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RankHistogram.ProtoReflect.Descriptor instead. -func (*RankHistogram) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{18} -} - -func (x *RankHistogram) GetBuckets() []*RankHistogram_Bucket { - if x != nil { - return x.Buckets - } - return nil -} - -func (x *RankHistogram) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -// A bucket for referring to binned numeric features. -type LiftSeries_Bucket struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The low value of the bucket, inclusive. - LowValue float64 `protobuf:"fixed64,1,opt,name=low_value,json=lowValue,proto3" json:"low_value,omitempty"` - // The high value of the bucket, exclusive (unless the high_value is - // positive infinity). - HighValue float64 `protobuf:"fixed64,2,opt,name=high_value,json=highValue,proto3" json:"high_value,omitempty"` -} - -func (x *LiftSeries_Bucket) Reset() { - *x = LiftSeries_Bucket{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[19] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LiftSeries_Bucket) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LiftSeries_Bucket) ProtoMessage() {} - -func (x *LiftSeries_Bucket) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[19] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LiftSeries_Bucket.ProtoReflect.Descriptor instead. -func (*LiftSeries_Bucket) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{6, 0} -} - -func (x *LiftSeries_Bucket) GetLowValue() float64 { - if x != nil { - return x.LowValue - } - return 0 -} - -func (x *LiftSeries_Bucket) GetHighValue() float64 { - if x != nil { - return x.HighValue - } - return 0 -} - -// A container for lift information about a specific value of path_x. -type LiftSeries_LiftValue struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to XValue: - // *LiftSeries_LiftValue_XInt - // *LiftSeries_LiftValue_XString - XValue isLiftSeries_LiftValue_XValue `protobuf_oneof:"x_value"` - // P(path_y=y|path_x=x) / P(path_y=y) for x_value and the enclosing y_value. 
- // In terms of concrete fields, this number represents: - // (x_and_y_count / x_count) / (y_count / num_examples) - Lift float64 `protobuf:"fixed64,3,opt,name=lift,proto3" json:"lift,omitempty"` - // The number of examples in which x_value appears. - // - // Types that are assignable to XCountValue: - // *LiftSeries_LiftValue_XCount - // *LiftSeries_LiftValue_WeightedXCount - XCountValue isLiftSeries_LiftValue_XCountValue `protobuf_oneof:"x_count_value"` - // The number of examples in which x_value appears and y_value appears. - // - // Types that are assignable to XAndYCountValue: - // *LiftSeries_LiftValue_XAndYCount - // *LiftSeries_LiftValue_WeightedXAndYCount - XAndYCountValue isLiftSeries_LiftValue_XAndYCountValue `protobuf_oneof:"x_and_y_count_value"` -} - -func (x *LiftSeries_LiftValue) Reset() { - *x = LiftSeries_LiftValue{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[20] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LiftSeries_LiftValue) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LiftSeries_LiftValue) ProtoMessage() {} - -func (x *LiftSeries_LiftValue) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[20] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LiftSeries_LiftValue.ProtoReflect.Descriptor instead. -func (*LiftSeries_LiftValue) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{6, 1} -} - -func (m *LiftSeries_LiftValue) GetXValue() isLiftSeries_LiftValue_XValue { - if m != nil { - return m.XValue - } - return nil -} - -func (x *LiftSeries_LiftValue) GetXInt() int32 { - if x, ok := x.GetXValue().(*LiftSeries_LiftValue_XInt); ok { - return x.XInt - } - return 0 -} - -func (x *LiftSeries_LiftValue) GetXString() string { - if x, ok := x.GetXValue().(*LiftSeries_LiftValue_XString); ok { - return x.XString - } - return "" -} - -func (x *LiftSeries_LiftValue) GetLift() float64 { - if x != nil { - return x.Lift - } - return 0 -} - -func (m *LiftSeries_LiftValue) GetXCountValue() isLiftSeries_LiftValue_XCountValue { - if m != nil { - return m.XCountValue - } - return nil -} - -func (x *LiftSeries_LiftValue) GetXCount() uint64 { - if x, ok := x.GetXCountValue().(*LiftSeries_LiftValue_XCount); ok { - return x.XCount - } - return 0 -} - -func (x *LiftSeries_LiftValue) GetWeightedXCount() float64 { - if x, ok := x.GetXCountValue().(*LiftSeries_LiftValue_WeightedXCount); ok { - return x.WeightedXCount - } - return 0 -} - -func (m *LiftSeries_LiftValue) GetXAndYCountValue() isLiftSeries_LiftValue_XAndYCountValue { - if m != nil { - return m.XAndYCountValue - } - return nil -} - -func (x *LiftSeries_LiftValue) GetXAndYCount() uint64 { - if x, ok := x.GetXAndYCountValue().(*LiftSeries_LiftValue_XAndYCount); ok { - return x.XAndYCount - } - return 0 -} - -func (x *LiftSeries_LiftValue) GetWeightedXAndYCount() float64 { - if x, ok := x.GetXAndYCountValue().(*LiftSeries_LiftValue_WeightedXAndYCount); ok { - return x.WeightedXAndYCount - } - return 0 -} - -type isLiftSeries_LiftValue_XValue interface { - isLiftSeries_LiftValue_XValue() -} - -type LiftSeries_LiftValue_XInt struct { - XInt int32 `protobuf:"varint,1,opt,name=x_int,json=xInt,proto3,oneof"` -} - 
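The LiftValue comment above spells out lift in terms of concrete counts: (x_and_y_count / x_count) / (y_count / num_examples), i.e. P(y|x) / P(y). A worked sketch of that arithmetic in plain Go, independent of the generated types; the counts in main are illustrative:

package main

import "fmt"

// lift computes P(path_y=y|path_x=x) / P(path_y=y) from raw co-occurrence
// counts, exactly as the LiftValue comment above defines it:
//   (x_and_y_count / x_count) / (y_count / num_examples)
func lift(xAndYCount, xCount, yCount, numExamples float64) float64 {
	return (xAndYCount / xCount) / (yCount / numExamples)
}

func main() {
	// Illustrative numbers: y occurs in 100 of 1000 examples overall (10%),
	// but in 30 of the 200 examples where x occurs (15%), so lift = 1.5.
	fmt.Println(lift(30, 200, 100, 1000)) // 1.5
}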
-type LiftSeries_LiftValue_XString struct { - XString string `protobuf:"bytes,2,opt,name=x_string,json=xString,proto3,oneof"` -} - -func (*LiftSeries_LiftValue_XInt) isLiftSeries_LiftValue_XValue() {} - -func (*LiftSeries_LiftValue_XString) isLiftSeries_LiftValue_XValue() {} - -type isLiftSeries_LiftValue_XCountValue interface { - isLiftSeries_LiftValue_XCountValue() -} - -type LiftSeries_LiftValue_XCount struct { - XCount uint64 `protobuf:"varint,4,opt,name=x_count,json=xCount,proto3,oneof"` -} - -type LiftSeries_LiftValue_WeightedXCount struct { - WeightedXCount float64 `protobuf:"fixed64,5,opt,name=weighted_x_count,json=weightedXCount,proto3,oneof"` -} - -func (*LiftSeries_LiftValue_XCount) isLiftSeries_LiftValue_XCountValue() {} - -func (*LiftSeries_LiftValue_WeightedXCount) isLiftSeries_LiftValue_XCountValue() {} - -type isLiftSeries_LiftValue_XAndYCountValue interface { - isLiftSeries_LiftValue_XAndYCountValue() -} - -type LiftSeries_LiftValue_XAndYCount struct { - XAndYCount uint64 `protobuf:"varint,6,opt,name=x_and_y_count,json=xAndYCount,proto3,oneof"` -} - -type LiftSeries_LiftValue_WeightedXAndYCount struct { - WeightedXAndYCount float64 `protobuf:"fixed64,7,opt,name=weighted_x_and_y_count,json=weightedXAndYCount,proto3,oneof"` -} - -func (*LiftSeries_LiftValue_XAndYCount) isLiftSeries_LiftValue_XAndYCountValue() {} - -func (*LiftSeries_LiftValue_WeightedXAndYCount) isLiftSeries_LiftValue_XAndYCountValue() {} - -type StringStatistics_FreqAndValue struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - // The number of times the value occurs. Stored as a double to be able to - // handle weighted features. - Frequency float64 `protobuf:"fixed64,3,opt,name=frequency,proto3" json:"frequency,omitempty"` -} - -func (x *StringStatistics_FreqAndValue) Reset() { - *x = StringStatistics_FreqAndValue{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[21] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *StringStatistics_FreqAndValue) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*StringStatistics_FreqAndValue) ProtoMessage() {} - -func (x *StringStatistics_FreqAndValue) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[21] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use StringStatistics_FreqAndValue.ProtoReflect.Descriptor instead. -func (*StringStatistics_FreqAndValue) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{11, 0} -} - -func (x *StringStatistics_FreqAndValue) GetValue() string { - if x != nil { - return x.Value - } - return "" -} - -func (x *StringStatistics_FreqAndValue) GetFrequency() float64 { - if x != nil { - return x.Frequency - } - return 0 -} - -// Each bucket defines its low and high values along with its count. The -// low and high values must be a real number or positive or negative -// infinity. They cannot be NaN or undefined. Counts of those special values -// can be found in the numNaN and numUndefined fields. 
-type Histogram_Bucket struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The low value of the bucket, inclusive. - LowValue float64 `protobuf:"fixed64,1,opt,name=low_value,json=lowValue,proto3" json:"low_value,omitempty"` - // The high value of the bucket, exclusive (unless the highValue is - // positive infinity). - HighValue float64 `protobuf:"fixed64,2,opt,name=high_value,json=highValue,proto3" json:"high_value,omitempty"` - // The number of items in the bucket. Stored as a double to be able to - // handle weighted histograms. - SampleCount float64 `protobuf:"fixed64,4,opt,name=sample_count,json=sampleCount,proto3" json:"sample_count,omitempty"` -} - -func (x *Histogram_Bucket) Reset() { - *x = Histogram_Bucket{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[22] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Histogram_Bucket) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Histogram_Bucket) ProtoMessage() {} - -func (x *Histogram_Bucket) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[22] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Histogram_Bucket.ProtoReflect.Descriptor instead. -func (*Histogram_Bucket) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{17, 0} -} - -func (x *Histogram_Bucket) GetLowValue() float64 { - if x != nil { - return x.LowValue - } - return 0 -} - -func (x *Histogram_Bucket) GetHighValue() float64 { - if x != nil { - return x.HighValue - } - return 0 -} - -func (x *Histogram_Bucket) GetSampleCount() float64 { - if x != nil { - return x.SampleCount - } - return 0 -} - -// Each bucket defines its start and end ranks along with its count. -type RankHistogram_Bucket struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The low rank of the bucket, inclusive. - LowRank uint64 `protobuf:"varint,1,opt,name=low_rank,json=lowRank,proto3" json:"low_rank,omitempty"` - // The high rank of the bucket, exclusive. - HighRank uint64 `protobuf:"varint,2,opt,name=high_rank,json=highRank,proto3" json:"high_rank,omitempty"` - // The label for the bucket. Can be used to list or summarize the values in - // this rank bucket. - Label string `protobuf:"bytes,4,opt,name=label,proto3" json:"label,omitempty"` - // The number of items in the bucket. Stored as a double to be able to - // handle weighted histograms. 
- SampleCount float64 `protobuf:"fixed64,5,opt,name=sample_count,json=sampleCount,proto3" json:"sample_count,omitempty"` -} - -func (x *RankHistogram_Bucket) Reset() { - *x = RankHistogram_Bucket{} - if protoimpl.UnsafeEnabled { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[23] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RankHistogram_Bucket) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RankHistogram_Bucket) ProtoMessage() {} - -func (x *RankHistogram_Bucket) ProtoReflect() protoreflect.Message { - mi := &file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[23] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RankHistogram_Bucket.ProtoReflect.Descriptor instead. -func (*RankHistogram_Bucket) Descriptor() ([]byte, []int) { - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP(), []int{18, 0} -} - -func (x *RankHistogram_Bucket) GetLowRank() uint64 { - if x != nil { - return x.LowRank - } - return 0 -} - -func (x *RankHistogram_Bucket) GetHighRank() uint64 { - if x != nil { - return x.HighRank - } - return 0 -} - -func (x *RankHistogram_Bucket) GetLabel() string { - if x != nil { - return x.Label - } - return "" -} - -func (x *RankHistogram_Bucket) GetSampleCount() float64 { - if x != nil { - return x.SampleCount - } - return 0 -} - -var File_tensorflow_metadata_proto_v0_statistics_proto protoreflect.FileDescriptor - -var file_tensorflow_metadata_proto_v0_statistics_proto_rawDesc = []byte{ - 0x0a, 0x2d, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x73, - 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x16, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x1a, 0x27, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2f, 0x76, 0x30, 0x2f, 0x70, 0x61, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x22, 0x6c, 0x0a, 0x1c, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x4c, 0x69, 0x73, 0x74, - 0x12, 0x4c, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x44, 0x61, 0x74, 0x61, - 0x73, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, - 0x74, 0x69, 0x63, 0x73, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x73, 0x22, 0xa7, - 0x02, 0x0a, 0x18, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, - 0x21, 0x0a, 0x0c, 0x6e, 0x75, 0x6d, 0x5f, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x6e, 0x75, 0x6d, 
0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, - 0x65, 0x73, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x5f, 0x6e, - 0x75, 0x6d, 0x5f, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x13, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x4e, 0x75, 0x6d, 0x45, 0x78, - 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x12, 0x49, 0x0a, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x74, 0x61, - 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x73, 0x12, 0x55, 0x0a, 0x0e, 0x63, 0x72, 0x6f, 0x73, 0x73, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x0d, 0x63, 0x72, 0x6f, 0x73, 0x73, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x22, 0xef, 0x02, 0x0a, 0x16, 0x43, 0x72, 0x6f, - 0x73, 0x73, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, - 0x69, 0x63, 0x73, 0x12, 0x33, 0x0a, 0x06, 0x70, 0x61, 0x74, 0x68, 0x5f, 0x78, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x50, 0x61, 0x74, - 0x68, 0x52, 0x05, 0x70, 0x61, 0x74, 0x68, 0x58, 0x12, 0x33, 0x0a, 0x06, 0x70, 0x61, 0x74, 0x68, - 0x5f, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x52, 0x05, 0x70, 0x61, 0x74, 0x68, 0x59, 0x12, 0x14, 0x0a, - 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x12, 0x58, 0x0a, 0x0f, 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x72, 0x6f, 0x73, 0x73, - 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x74, - 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x43, 0x72, 0x6f, - 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x48, 0x00, 0x52, 0x0d, - 0x6e, 0x75, 0x6d, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x6c, 0x0a, - 0x17, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x63, 0x72, 0x6f, - 0x73, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, - 0x63, 0x61, 0x6c, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, - 0x63, 0x73, 0x48, 0x00, 0x52, 0x15, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x63, 0x61, - 0x6c, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x73, 0x42, 0x0d, 0x0a, 
0x0b, 0x63, - 0x72, 0x6f, 0x73, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0x5a, 0x0a, 0x16, 0x4e, 0x75, - 0x6d, 0x65, 0x72, 0x69, 0x63, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, - 0x74, 0x69, 0x63, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x72, 0x72, 0x65, 0x6c, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x63, 0x6f, 0x72, 0x72, 0x65, - 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x76, 0x61, 0x72, 0x69, - 0x61, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x76, 0x61, - 0x72, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x22, 0x58, 0x0a, 0x1a, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, - 0x72, 0x69, 0x63, 0x61, 0x6c, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, - 0x74, 0x69, 0x63, 0x73, 0x12, 0x3a, 0x0a, 0x04, 0x6c, 0x69, 0x66, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4c, 0x69, 0x66, 0x74, - 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x04, 0x6c, 0x69, 0x66, 0x74, - 0x22, 0xab, 0x01, 0x0a, 0x0e, 0x4c, 0x69, 0x66, 0x74, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, - 0x69, 0x63, 0x73, 0x12, 0x43, 0x0a, 0x0b, 0x6c, 0x69, 0x66, 0x74, 0x5f, 0x73, 0x65, 0x72, 0x69, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x4c, 0x69, 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x52, 0x0a, 0x6c, 0x69, - 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x54, 0x0a, 0x14, 0x77, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x69, 0x66, 0x74, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x65, 0x73, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x4c, 0x69, 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x52, 0x12, 0x77, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x22, 0xab, - 0x05, 0x0a, 0x0a, 0x4c, 0x69, 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x15, 0x0a, - 0x05, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x04, - 0x79, 0x49, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x08, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x79, 0x53, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x12, 0x46, 0x0a, 0x08, 0x79, 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4c, 0x69, 0x66, - 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x48, 0x00, - 0x52, 0x07, 0x79, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x19, 0x0a, 0x07, 0x79, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x48, 0x01, 0x52, 0x06, 0x79, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2a, 0x0a, 0x10, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, - 0x5f, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, 0x48, 0x01, - 0x52, 0x0e, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x59, 0x43, 0x6f, 0x75, 0x6e, 0x74, - 0x12, 0x4d, 
0x0a, 0x0b, 0x6c, 0x69, 0x66, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, - 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4c, - 0x69, 0x66, 0x74, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x2e, 0x4c, 0x69, 0x66, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x6c, 0x69, 0x66, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, - 0x44, 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x6c, 0x6f, 0x77, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x6c, 0x6f, - 0x77, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x68, 0x69, 0x67, 0x68, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x68, 0x69, 0x67, 0x68, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0xa8, 0x02, 0x0a, 0x09, 0x4c, 0x69, 0x66, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x15, 0x0a, 0x05, 0x78, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x05, 0x48, 0x00, 0x52, 0x04, 0x78, 0x49, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x08, 0x78, 0x5f, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, - 0x78, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x66, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x04, 0x6c, 0x69, 0x66, 0x74, 0x12, 0x19, 0x0a, 0x07, 0x78, - 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x48, 0x01, 0x52, 0x06, - 0x78, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2a, 0x0a, 0x10, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, - 0x65, 0x64, 0x5f, 0x78, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, - 0x48, 0x01, 0x52, 0x0e, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x58, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x78, 0x5f, 0x61, 0x6e, 0x64, 0x5f, 0x79, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x48, 0x02, 0x52, 0x0a, 0x78, 0x41, 0x6e, - 0x64, 0x59, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x65, 0x69, 0x67, 0x68, - 0x74, 0x65, 0x64, 0x5f, 0x78, 0x5f, 0x61, 0x6e, 0x64, 0x5f, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, - 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x01, 0x48, 0x02, 0x52, 0x12, 0x77, 0x65, 0x69, 0x67, 0x68, - 0x74, 0x65, 0x64, 0x58, 0x41, 0x6e, 0x64, 0x59, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x09, 0x0a, - 0x07, 0x78, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x78, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x15, 0x0a, 0x13, 0x78, 0x5f, 0x61, - 0x6e, 0x64, 0x5f, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x42, 0x09, 0x0a, 0x07, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x79, - 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xfd, 0x04, 0x0a, - 0x15, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x74, - 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x14, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x04, - 0x70, 0x61, 0x74, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, - 0x12, 0x46, 0x0a, 0x04, 0x74, 0x79, 
0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x32, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x4e, - 0x61, 0x6d, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x2e, 0x54, 0x79, - 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x48, 0x0a, 0x09, 0x6e, 0x75, 0x6d, 0x5f, - 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x53, 0x74, 0x61, 0x74, - 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x48, 0x01, 0x52, 0x08, 0x6e, 0x75, 0x6d, 0x53, 0x74, 0x61, - 0x74, 0x73, 0x12, 0x4d, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, 0x61, - 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, - 0x63, 0x73, 0x48, 0x01, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x12, 0x4a, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x42, 0x79, 0x74, 0x65, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x48, - 0x01, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x4d, 0x0a, - 0x0c, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x07, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x48, 0x01, 0x52, - 0x0b, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x4a, 0x0a, 0x0c, - 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x43, 0x75, 0x73, 0x74, - 0x6f, 0x6d, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x63, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x53, 0x74, 0x61, 0x74, 0x73, 0x22, 0x3d, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, - 0x41, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x02, - 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, 0x54, 0x45, 0x53, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x53, - 0x54, 0x52, 0x55, 0x43, 0x54, 0x10, 0x04, 0x42, 0x0a, 0x0a, 0x08, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x5f, 0x69, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0xaf, 0x01, 0x0a, - 0x18, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, - 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x75, 0x6d, - 0x5f, 0x6e, 0x6f, 0x6e, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 
0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x01, 0x52, 0x0d, 0x6e, 0x75, 0x6d, 0x4e, 0x6f, 0x6e, 0x4d, 0x69, 0x73, 0x73, 0x69, 0x6e, - 0x67, 0x12, 0x1f, 0x0a, 0x0b, 0x6e, 0x75, 0x6d, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6e, 0x75, 0x6d, 0x4d, 0x69, 0x73, 0x73, 0x69, - 0x6e, 0x67, 0x12, 0x24, 0x0a, 0x0e, 0x61, 0x76, 0x67, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0c, 0x61, 0x76, 0x67, 0x4e, - 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x74, 0x6f, 0x74, 0x5f, - 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, - 0x52, 0x0c, 0x74, 0x6f, 0x74, 0x4e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0xe7, - 0x01, 0x0a, 0x0f, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, - 0x69, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x03, 0x6e, 0x75, 0x6d, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x03, 0x6e, 0x75, 0x6d, 0x12, 0x12, 0x0a, 0x03, 0x73, 0x74, - 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x73, 0x74, 0x72, 0x12, 0x41, - 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, - 0x67, 0x72, 0x61, 0x6d, 0x48, 0x00, 0x52, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, - 0x6d, 0x12, 0x4e, 0x0a, 0x0e, 0x72, 0x61, 0x6e, 0x6b, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, - 0x72, 0x61, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x52, 0x61, 0x6e, 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, - 0x48, 0x00, 0x52, 0x0d, 0x72, 0x61, 0x6e, 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, - 0x6d, 0x42, 0x05, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x92, 0x03, 0x0a, 0x11, 0x4e, 0x75, 0x6d, - 0x65, 0x72, 0x69, 0x63, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x4b, - 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x43, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x0b, - 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6d, - 0x65, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x04, 0x6d, 0x65, 0x61, 0x6e, 0x12, - 0x17, 0x0a, 0x07, 0x73, 0x74, 0x64, 0x5f, 0x64, 0x65, 0x76, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, - 0x52, 0x06, 0x73, 0x74, 0x64, 0x44, 0x65, 0x76, 0x12, 0x1b, 0x0a, 0x09, 0x6e, 0x75, 0x6d, 0x5f, - 0x7a, 0x65, 0x72, 0x6f, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x6e, 0x75, 0x6d, - 0x5a, 0x65, 0x72, 0x6f, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x69, 0x6e, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x01, 0x52, 0x03, 0x6d, 0x69, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x64, 0x69, 0x61, - 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x06, 0x6d, 0x65, 0x64, 0x69, 0x61, 
0x6e, 0x12, - 0x10, 0x0a, 0x03, 0x6d, 0x61, 0x78, 0x18, 0x07, 0x20, 0x01, 0x28, 0x01, 0x52, 0x03, 0x6d, 0x61, - 0x78, 0x12, 0x41, 0x0a, 0x0a, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x73, 0x18, - 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x48, - 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x0a, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, - 0x72, 0x61, 0x6d, 0x73, 0x12, 0x67, 0x0a, 0x16, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, - 0x5f, 0x6e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x57, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x53, 0x74, 0x61, - 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x14, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, - 0x64, 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x53, 0x74, 0x61, 0x74, 0x73, 0x22, 0x93, 0x04, - 0x0a, 0x10, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, - 0x63, 0x73, 0x12, 0x4b, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, - 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, - 0x63, 0x73, 0x52, 0x0b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, - 0x16, 0x0a, 0x06, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, - 0x06, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x12, 0x54, 0x0a, 0x0a, 0x74, 0x6f, 0x70, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x69, - 0x73, 0x74, 0x69, 0x63, 0x73, 0x2e, 0x46, 0x72, 0x65, 0x71, 0x41, 0x6e, 0x64, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x09, 0x74, 0x6f, 0x70, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x1d, 0x0a, - 0x0a, 0x61, 0x76, 0x67, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x02, 0x52, 0x09, 0x61, 0x76, 0x67, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x4c, 0x0a, 0x0e, - 0x72, 0x61, 0x6e, 0x6b, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x52, 0x61, - 0x6e, 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x0d, 0x72, 0x61, 0x6e, - 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x64, 0x0a, 0x15, 0x77, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, - 0x61, 0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x53, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x53, 
0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x13, 0x77, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x65, 0x64, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x12, 0x27, 0x0a, 0x0f, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x75, 0x6c, 0x61, 0x72, 0x79, 0x5f, 0x66, - 0x69, 0x6c, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x76, 0x6f, 0x63, 0x61, 0x62, - 0x75, 0x6c, 0x61, 0x72, 0x79, 0x46, 0x69, 0x6c, 0x65, 0x1a, 0x48, 0x0a, 0x0c, 0x46, 0x72, 0x65, - 0x71, 0x41, 0x6e, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1c, 0x0a, 0x09, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x01, 0x52, 0x09, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x4a, 0x04, 0x08, - 0x01, 0x10, 0x02, 0x22, 0xa3, 0x01, 0x0a, 0x19, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, - 0x4e, 0x75, 0x6d, 0x65, 0x72, 0x69, 0x63, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, - 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x65, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, - 0x04, 0x6d, 0x65, 0x61, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x73, 0x74, 0x64, 0x5f, 0x64, 0x65, 0x76, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x06, 0x73, 0x74, 0x64, 0x44, 0x65, 0x76, 0x12, 0x16, - 0x0a, 0x06, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x06, - 0x6d, 0x65, 0x64, 0x69, 0x61, 0x6e, 0x12, 0x41, 0x0a, 0x0a, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, - 0x72, 0x61, 0x6d, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x0a, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x73, 0x22, 0xbe, 0x01, 0x0a, 0x18, 0x57, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, - 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x54, 0x0a, 0x0a, 0x74, 0x6f, 0x70, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, - 0x74, 0x69, 0x63, 0x73, 0x2e, 0x46, 0x72, 0x65, 0x71, 0x41, 0x6e, 0x64, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x52, 0x09, 0x74, 0x6f, 0x70, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x4c, 0x0a, 0x0e, - 0x72, 0x61, 0x6e, 0x6b, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x52, 0x61, - 0x6e, 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x0d, 0x72, 0x61, 0x6e, - 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x22, 0xe2, 0x01, 0x0a, 0x0f, 0x42, - 0x79, 0x74, 0x65, 0x73, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x4b, - 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x43, 0x6f, - 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 
0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x0b, - 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x75, - 0x6e, 0x69, 0x71, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x75, 0x6e, 0x69, - 0x71, 0x75, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x61, 0x76, 0x67, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x62, - 0x79, 0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x61, 0x76, 0x67, 0x4e, - 0x75, 0x6d, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x22, 0x0a, 0x0d, 0x6d, 0x69, 0x6e, 0x5f, 0x6e, - 0x75, 0x6d, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, - 0x6d, 0x69, 0x6e, 0x4e, 0x75, 0x6d, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x22, 0x0a, 0x0d, 0x6d, - 0x61, 0x78, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x02, 0x52, 0x0b, 0x6d, 0x61, 0x78, 0x4e, 0x75, 0x6d, 0x42, 0x79, 0x74, 0x65, 0x73, 0x22, - 0x5f, 0x0a, 0x10, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, - 0x69, 0x63, 0x73, 0x12, 0x4b, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x73, 0x74, - 0x61, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, - 0x69, 0x63, 0x73, 0x52, 0x0b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x22, 0x94, 0x04, 0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x69, - 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x75, 0x6d, 0x5f, 0x6e, 0x6f, 0x6e, - 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d, - 0x6e, 0x75, 0x6d, 0x4e, 0x6f, 0x6e, 0x4d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x1f, 0x0a, - 0x0b, 0x6e, 0x75, 0x6d, 0x5f, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x0a, 0x6e, 0x75, 0x6d, 0x4d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x24, - 0x0a, 0x0e, 0x6d, 0x69, 0x6e, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x6d, 0x69, 0x6e, 0x4e, 0x75, 0x6d, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x6d, 0x61, - 0x78, 0x4e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x61, 0x76, - 0x67, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x02, 0x52, 0x0c, 0x61, 0x76, 0x67, 0x4e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x12, 0x24, 0x0a, 0x0e, 0x74, 0x6f, 0x74, 0x5f, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x74, 0x6f, 0x74, 0x4e, 0x75, 0x6d, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x53, 0x0a, 0x14, 0x6e, 0x75, 0x6d, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x48, 0x69, - 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x12, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 
0x6d, 0x12, 0x64, 0x0a, 0x15, 0x77, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x73, - 0x74, 0x61, 0x74, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6d, 0x6d, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x13, 0x77, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x12, 0x64, 0x0a, 0x1d, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x6c, 0x69, 0x73, - 0x74, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, - 0x61, 0x6d, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x1a, 0x66, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x48, 0x69, - 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x22, 0x83, 0x03, 0x0a, 0x09, 0x48, 0x69, 0x73, 0x74, - 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x17, 0x0a, 0x07, 0x6e, 0x75, 0x6d, 0x5f, 0x6e, 0x61, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x4e, 0x61, 0x6e, 0x12, 0x23, - 0x0a, 0x0d, 0x6e, 0x75, 0x6d, 0x5f, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x6e, 0x75, 0x6d, 0x55, 0x6e, 0x64, 0x65, 0x66, 0x69, - 0x6e, 0x65, 0x64, 0x12, 0x42, 0x0a, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x48, 0x69, - 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x07, - 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x43, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2f, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x48, - 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, - 0x61, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x1a, 0x6d, 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x6c, 0x6f, - 0x77, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x6c, - 0x6f, 0x77, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x68, 0x69, 0x67, 0x68, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x68, 0x69, 0x67, - 0x68, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, - 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, 0x73, 0x61, - 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x22, - 0x2c, 0x0a, 0x0d, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x41, 0x4e, 0x44, 0x41, 0x52, 0x44, 0x10, 0x00, 
0x12, 0x0d, - 0x0a, 0x09, 0x51, 0x55, 0x41, 0x4e, 0x54, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x22, 0xec, 0x01, - 0x0a, 0x0d, 0x52, 0x61, 0x6e, 0x6b, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, - 0x46, 0x0a, 0x07, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x2c, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x52, 0x61, 0x6e, 0x6b, 0x48, 0x69, - 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x07, - 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x7f, 0x0a, 0x06, 0x42, - 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x61, 0x6e, - 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x6c, 0x6f, 0x77, 0x52, 0x61, 0x6e, 0x6b, - 0x12, 0x1b, 0x0a, 0x09, 0x68, 0x69, 0x67, 0x68, 0x5f, 0x72, 0x61, 0x6e, 0x6b, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x08, 0x68, 0x69, 0x67, 0x68, 0x52, 0x61, 0x6e, 0x6b, 0x12, 0x14, 0x0a, - 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, 0x73, 0x61, 0x6d, 0x70, 0x6c, - 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x42, 0x68, 0x0a, 0x1a, - 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, 0x45, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, - 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, - 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_tensorflow_metadata_proto_v0_statistics_proto_rawDescOnce sync.Once - file_tensorflow_metadata_proto_v0_statistics_proto_rawDescData = file_tensorflow_metadata_proto_v0_statistics_proto_rawDesc -) - -func file_tensorflow_metadata_proto_v0_statistics_proto_rawDescGZIP() []byte { - file_tensorflow_metadata_proto_v0_statistics_proto_rawDescOnce.Do(func() { - file_tensorflow_metadata_proto_v0_statistics_proto_rawDescData = protoimpl.X.CompressGZIP(file_tensorflow_metadata_proto_v0_statistics_proto_rawDescData) - }) - return file_tensorflow_metadata_proto_v0_statistics_proto_rawDescData -} - -var file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes = make([]protoimpl.MessageInfo, 24) -var file_tensorflow_metadata_proto_v0_statistics_proto_goTypes = []interface{}{ - (FeatureNameStatistics_Type)(0), // 0: tensorflow.metadata.v0.FeatureNameStatistics.Type - (Histogram_HistogramType)(0), // 1: tensorflow.metadata.v0.Histogram.HistogramType - (*DatasetFeatureStatisticsList)(nil), // 2: tensorflow.metadata.v0.DatasetFeatureStatisticsList - (*DatasetFeatureStatistics)(nil), // 3: tensorflow.metadata.v0.DatasetFeatureStatistics - (*CrossFeatureStatistics)(nil), // 4: 
tensorflow.metadata.v0.CrossFeatureStatistics - (*NumericCrossStatistics)(nil), // 5: tensorflow.metadata.v0.NumericCrossStatistics - (*CategoricalCrossStatistics)(nil), // 6: tensorflow.metadata.v0.CategoricalCrossStatistics - (*LiftStatistics)(nil), // 7: tensorflow.metadata.v0.LiftStatistics - (*LiftSeries)(nil), // 8: tensorflow.metadata.v0.LiftSeries - (*FeatureNameStatistics)(nil), // 9: tensorflow.metadata.v0.FeatureNameStatistics - (*WeightedCommonStatistics)(nil), // 10: tensorflow.metadata.v0.WeightedCommonStatistics - (*CustomStatistic)(nil), // 11: tensorflow.metadata.v0.CustomStatistic - (*NumericStatistics)(nil), // 12: tensorflow.metadata.v0.NumericStatistics - (*StringStatistics)(nil), // 13: tensorflow.metadata.v0.StringStatistics - (*WeightedNumericStatistics)(nil), // 14: tensorflow.metadata.v0.WeightedNumericStatistics - (*WeightedStringStatistics)(nil), // 15: tensorflow.metadata.v0.WeightedStringStatistics - (*BytesStatistics)(nil), // 16: tensorflow.metadata.v0.BytesStatistics - (*StructStatistics)(nil), // 17: tensorflow.metadata.v0.StructStatistics - (*CommonStatistics)(nil), // 18: tensorflow.metadata.v0.CommonStatistics - (*Histogram)(nil), // 19: tensorflow.metadata.v0.Histogram - (*RankHistogram)(nil), // 20: tensorflow.metadata.v0.RankHistogram - (*LiftSeries_Bucket)(nil), // 21: tensorflow.metadata.v0.LiftSeries.Bucket - (*LiftSeries_LiftValue)(nil), // 22: tensorflow.metadata.v0.LiftSeries.LiftValue - (*StringStatistics_FreqAndValue)(nil), // 23: tensorflow.metadata.v0.StringStatistics.FreqAndValue - (*Histogram_Bucket)(nil), // 24: tensorflow.metadata.v0.Histogram.Bucket - (*RankHistogram_Bucket)(nil), // 25: tensorflow.metadata.v0.RankHistogram.Bucket - (*Path)(nil), // 26: tensorflow.metadata.v0.Path -} -var file_tensorflow_metadata_proto_v0_statistics_proto_depIdxs = []int32{ - 3, // 0: tensorflow.metadata.v0.DatasetFeatureStatisticsList.datasets:type_name -> tensorflow.metadata.v0.DatasetFeatureStatistics - 9, // 1: tensorflow.metadata.v0.DatasetFeatureStatistics.features:type_name -> tensorflow.metadata.v0.FeatureNameStatistics - 4, // 2: tensorflow.metadata.v0.DatasetFeatureStatistics.cross_features:type_name -> tensorflow.metadata.v0.CrossFeatureStatistics - 26, // 3: tensorflow.metadata.v0.CrossFeatureStatistics.path_x:type_name -> tensorflow.metadata.v0.Path - 26, // 4: tensorflow.metadata.v0.CrossFeatureStatistics.path_y:type_name -> tensorflow.metadata.v0.Path - 5, // 5: tensorflow.metadata.v0.CrossFeatureStatistics.num_cross_stats:type_name -> tensorflow.metadata.v0.NumericCrossStatistics - 6, // 6: tensorflow.metadata.v0.CrossFeatureStatistics.categorical_cross_stats:type_name -> tensorflow.metadata.v0.CategoricalCrossStatistics - 7, // 7: tensorflow.metadata.v0.CategoricalCrossStatistics.lift:type_name -> tensorflow.metadata.v0.LiftStatistics - 8, // 8: tensorflow.metadata.v0.LiftStatistics.lift_series:type_name -> tensorflow.metadata.v0.LiftSeries - 8, // 9: tensorflow.metadata.v0.LiftStatistics.weighted_lift_series:type_name -> tensorflow.metadata.v0.LiftSeries - 21, // 10: tensorflow.metadata.v0.LiftSeries.y_bucket:type_name -> tensorflow.metadata.v0.LiftSeries.Bucket - 22, // 11: tensorflow.metadata.v0.LiftSeries.lift_values:type_name -> tensorflow.metadata.v0.LiftSeries.LiftValue - 26, // 12: tensorflow.metadata.v0.FeatureNameStatistics.path:type_name -> tensorflow.metadata.v0.Path - 0, // 13: tensorflow.metadata.v0.FeatureNameStatistics.type:type_name -> tensorflow.metadata.v0.FeatureNameStatistics.Type - 12, // 14: 
tensorflow.metadata.v0.FeatureNameStatistics.num_stats:type_name -> tensorflow.metadata.v0.NumericStatistics - 13, // 15: tensorflow.metadata.v0.FeatureNameStatistics.string_stats:type_name -> tensorflow.metadata.v0.StringStatistics - 16, // 16: tensorflow.metadata.v0.FeatureNameStatistics.bytes_stats:type_name -> tensorflow.metadata.v0.BytesStatistics - 17, // 17: tensorflow.metadata.v0.FeatureNameStatistics.struct_stats:type_name -> tensorflow.metadata.v0.StructStatistics - 11, // 18: tensorflow.metadata.v0.FeatureNameStatistics.custom_stats:type_name -> tensorflow.metadata.v0.CustomStatistic - 19, // 19: tensorflow.metadata.v0.CustomStatistic.histogram:type_name -> tensorflow.metadata.v0.Histogram - 20, // 20: tensorflow.metadata.v0.CustomStatistic.rank_histogram:type_name -> tensorflow.metadata.v0.RankHistogram - 18, // 21: tensorflow.metadata.v0.NumericStatistics.common_stats:type_name -> tensorflow.metadata.v0.CommonStatistics - 19, // 22: tensorflow.metadata.v0.NumericStatistics.histograms:type_name -> tensorflow.metadata.v0.Histogram - 14, // 23: tensorflow.metadata.v0.NumericStatistics.weighted_numeric_stats:type_name -> tensorflow.metadata.v0.WeightedNumericStatistics - 18, // 24: tensorflow.metadata.v0.StringStatistics.common_stats:type_name -> tensorflow.metadata.v0.CommonStatistics - 23, // 25: tensorflow.metadata.v0.StringStatistics.top_values:type_name -> tensorflow.metadata.v0.StringStatistics.FreqAndValue - 20, // 26: tensorflow.metadata.v0.StringStatistics.rank_histogram:type_name -> tensorflow.metadata.v0.RankHistogram - 15, // 27: tensorflow.metadata.v0.StringStatistics.weighted_string_stats:type_name -> tensorflow.metadata.v0.WeightedStringStatistics - 19, // 28: tensorflow.metadata.v0.WeightedNumericStatistics.histograms:type_name -> tensorflow.metadata.v0.Histogram - 23, // 29: tensorflow.metadata.v0.WeightedStringStatistics.top_values:type_name -> tensorflow.metadata.v0.StringStatistics.FreqAndValue - 20, // 30: tensorflow.metadata.v0.WeightedStringStatistics.rank_histogram:type_name -> tensorflow.metadata.v0.RankHistogram - 18, // 31: tensorflow.metadata.v0.BytesStatistics.common_stats:type_name -> tensorflow.metadata.v0.CommonStatistics - 18, // 32: tensorflow.metadata.v0.StructStatistics.common_stats:type_name -> tensorflow.metadata.v0.CommonStatistics - 19, // 33: tensorflow.metadata.v0.CommonStatistics.num_values_histogram:type_name -> tensorflow.metadata.v0.Histogram - 10, // 34: tensorflow.metadata.v0.CommonStatistics.weighted_common_stats:type_name -> tensorflow.metadata.v0.WeightedCommonStatistics - 19, // 35: tensorflow.metadata.v0.CommonStatistics.feature_list_length_histogram:type_name -> tensorflow.metadata.v0.Histogram - 24, // 36: tensorflow.metadata.v0.Histogram.buckets:type_name -> tensorflow.metadata.v0.Histogram.Bucket - 1, // 37: tensorflow.metadata.v0.Histogram.type:type_name -> tensorflow.metadata.v0.Histogram.HistogramType - 25, // 38: tensorflow.metadata.v0.RankHistogram.buckets:type_name -> tensorflow.metadata.v0.RankHistogram.Bucket - 39, // [39:39] is the sub-list for method output_type - 39, // [39:39] is the sub-list for method input_type - 39, // [39:39] is the sub-list for extension type_name - 39, // [39:39] is the sub-list for extension extendee - 0, // [0:39] is the sub-list for field type_name -} - -func init() { file_tensorflow_metadata_proto_v0_statistics_proto_init() } -func file_tensorflow_metadata_proto_v0_statistics_proto_init() { - if File_tensorflow_metadata_proto_v0_statistics_proto != nil { - return - } - 
file_tensorflow_metadata_proto_v0_path_proto_init() - if !protoimpl.UnsafeEnabled { - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DatasetFeatureStatisticsList); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DatasetFeatureStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CrossFeatureStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NumericCrossStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CategoricalCrossStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LiftStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LiftSeries); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FeatureNameStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WeightedCommonStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CustomStatistic); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*NumericStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StringStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return 
nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WeightedNumericStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WeightedStringStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BytesStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StructStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CommonStatistics); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Histogram); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RankHistogram); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LiftSeries_Bucket); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LiftSeries_LiftValue); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StringStatistics_FreqAndValue); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Histogram_Bucket); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RankHistogram_Bucket); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - 
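Everything deleted in this hunk is protoc-gen-go registration boilerplate rather than hand-written code: the hex block above is the serialized FileDescriptorProto, rawDescGZIP compresses it lazily for legacy reflection, and the Exporter functions exist only so reflection still works when protoimpl.UnsafeEnabled is false. A minimal, self-contained sketch of the once-guarded lazy-compression pattern (all identifiers here are illustrative stand-ins, not the generated names):

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"sync"
)

var (
	rawDesc     = []byte("...serialized FileDescriptorProto bytes...")
	rawDescOnce sync.Once
	rawDescData = rawDesc
)

// rawDescGZIP compresses the descriptor exactly once and caches the result;
// the generated code does the same via protoimpl.X.CompressGZIP.
func rawDescGZIP() []byte {
	rawDescOnce.Do(func() {
		var buf bytes.Buffer
		zw := gzip.NewWriter(&buf)
		_, _ = zw.Write(rawDescData)
		_ = zw.Close()
		rawDescData = buf.Bytes()
	})
	return rawDescData
}

func main() {
	fmt.Println(len(rawDescGZIP()))
}

Caching the compressed form in place lets the uncompressed bytes be released after first use, which is why the generated init() below also nils out the raw descriptor variable once the type builder has consumed it.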
file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[2].OneofWrappers = []interface{}{ - (*CrossFeatureStatistics_NumCrossStats)(nil), - (*CrossFeatureStatistics_CategoricalCrossStats)(nil), - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[6].OneofWrappers = []interface{}{ - (*LiftSeries_YInt)(nil), - (*LiftSeries_YString)(nil), - (*LiftSeries_YBucket)(nil), - (*LiftSeries_YCount)(nil), - (*LiftSeries_WeightedYCount)(nil), - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[7].OneofWrappers = []interface{}{ - (*FeatureNameStatistics_Name)(nil), - (*FeatureNameStatistics_Path)(nil), - (*FeatureNameStatistics_NumStats)(nil), - (*FeatureNameStatistics_StringStats)(nil), - (*FeatureNameStatistics_BytesStats)(nil), - (*FeatureNameStatistics_StructStats)(nil), - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[9].OneofWrappers = []interface{}{ - (*CustomStatistic_Num)(nil), - (*CustomStatistic_Str)(nil), - (*CustomStatistic_Histogram)(nil), - (*CustomStatistic_RankHistogram)(nil), - } - file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes[20].OneofWrappers = []interface{}{ - (*LiftSeries_LiftValue_XInt)(nil), - (*LiftSeries_LiftValue_XString)(nil), - (*LiftSeries_LiftValue_XCount)(nil), - (*LiftSeries_LiftValue_WeightedXCount)(nil), - (*LiftSeries_LiftValue_XAndYCount)(nil), - (*LiftSeries_LiftValue_WeightedXAndYCount)(nil), - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_tensorflow_metadata_proto_v0_statistics_proto_rawDesc, - NumEnums: 2, - NumMessages: 24, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_tensorflow_metadata_proto_v0_statistics_proto_goTypes, - DependencyIndexes: file_tensorflow_metadata_proto_v0_statistics_proto_depIdxs, - EnumInfos: file_tensorflow_metadata_proto_v0_statistics_proto_enumTypes, - MessageInfos: file_tensorflow_metadata_proto_v0_statistics_proto_msgTypes, - }.Build() - File_tensorflow_metadata_proto_v0_statistics_proto = out.File - file_tensorflow_metadata_proto_v0_statistics_proto_rawDesc = nil - file_tensorflow_metadata_proto_v0_statistics_proto_goTypes = nil - file_tensorflow_metadata_proto_v0_statistics_proto_depIdxs = nil -} diff --git a/sdk/go/request.go b/sdk/go/request.go deleted file mode 100644 index 360603b3a3..0000000000 --- a/sdk/go/request.go +++ /dev/null @@ -1,92 +0,0 @@ -package feast - -import ( - "fmt" - "github.com/feast-dev/feast/sdk/go/protos/feast/serving" - "strings" ) - -var ( - // ErrInvalidFeatureRef indicates that the user has provided a feature reference - // with the wrong structure or contents - ErrInvalidFeatureRef = "Invalid Feature Reference %s provided, " + - "feature reference must be in the format featureTableName:featureName" -) - -// OnlineFeaturesRequest is a wrapper around feast.serving.GetOnlineFeaturesRequestV2. -type OnlineFeaturesRequest struct { - // Features is the list of features to obtain from Feast. Each feature can be given in - // the format feature_table:feature, where "feature_table" & "feature" are the feature table name - // and feature name respectively. The only required component is the feature name. - Features []string - - // Entities is the list of entity rows to retrieve features on. Each row is a map of entity name to entity value. - Entities []Row - - // Project optionally specifies the project override. If specified, uses the given project for retrieval. - // Overrides the projects specified in Feature References if also specified.
- Project string -} - -// Builds the feast-specified request payload from the wrapper. -func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequestV2, error) { - featureRefs, err := buildFeatureRefs(r.Features) - if err != nil { - return nil, err - } - - // build request entity rows from native entities - entityRows := make([]*serving.GetOnlineFeaturesRequestV2_EntityRow, len(r.Entities)) - for i, entity := range r.Entities { - entityRows[i] = &serving.GetOnlineFeaturesRequestV2_EntityRow{ - Fields: entity, - } - } - - return &serving.GetOnlineFeaturesRequestV2{ - Features: featureRefs, - EntityRows: entityRows, - Project: r.Project, - }, nil -} - -// Creates a slice of FeatureReferences from string representation in -// the format featuretable:feature. -// featureRefStrs - string feature references to parse. -// Returns parsed FeatureReferences. -// Returns an error when the format of the string feature reference is invalid -func buildFeatureRefs(featureRefStrs []string) ([]*serving.FeatureReferenceV2, error) { - var featureRefs []*serving.FeatureReferenceV2 - - for _, featureRefStr := range featureRefStrs { - featureRef, err := parseFeatureRef(featureRefStr) - if err != nil { - return nil, err - } - featureRefs = append(featureRefs, featureRef) - } - return featureRefs, nil -} - -// Parses a string FeatureReference into FeatureReference proto -// featureRefStr - the string feature reference to parse. -// Returns parsed FeatureReference. -// Returns an error when the format of the string feature reference is invalid -func parseFeatureRef(featureRefStr string) (*serving.FeatureReferenceV2, error) { - if len(featureRefStr) == 0 { - return nil, fmt.Errorf(ErrInvalidFeatureRef, featureRefStr) - } - - var featureRef serving.FeatureReferenceV2 - if strings.Contains(featureRefStr, "/") || !strings.Contains(featureRefStr, ":") { - return nil, fmt.Errorf(ErrInvalidFeatureRef, featureRefStr) - } - // parse featuretable if specified - if strings.Contains(featureRefStr, ":") { - refSplit := strings.Split(featureRefStr, ":") - featureRef.FeatureTable, featureRefStr = refSplit[0], refSplit[1] - } - featureRef.Name = featureRefStr - - return &featureRef, nil -} diff --git a/sdk/go/request_test.go b/sdk/go/request_test.go deleted file mode 100644 index 0e9b89d119..0000000000 --- a/sdk/go/request_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package feast - -import ( - "fmt" - "github.com/feast-dev/feast/sdk/go/protos/feast/serving" - "github.com/feast-dev/feast/sdk/go/protos/feast/types" - json "github.com/golang/protobuf/jsonpb" - "github.com/golang/protobuf/proto" - "testing" -) - -func TestGetOnlineFeaturesRequest(t *testing.T) { - tt := []struct { - name string - req OnlineFeaturesRequest - want *serving.GetOnlineFeaturesRequestV2 - wantErr bool - err error - }{ - { - name: "valid", - req: OnlineFeaturesRequest{ - Features: []string{ - "driver:driver_id", - }, - Entities: []Row{ - {"entity1": Int64Val(1), "entity2": StrVal("bob")}, - {"entity1": Int64Val(1), "entity2": StrVal("annie")}, - {"entity1": Int64Val(1), "entity2": StrVal("jane")}, - }, - Project: "driver_project", - }, - want: &serving.GetOnlineFeaturesRequestV2{ - Features: []*serving.FeatureReferenceV2{ - { - FeatureTable: "driver", - Name: "driver_id", - }, - }, - EntityRows: []*serving.GetOnlineFeaturesRequestV2_EntityRow{ - { - Fields: map[string]*types.Value{ - "entity1": Int64Val(1), - "entity2": StrVal("bob"), - }, - }, - { - Fields: map[string]*types.Value{ - "entity1": Int64Val(1), - "entity2": StrVal("annie"), - 
}, - }, - { - Fields: map[string]*types.Value{ - "entity1": Int64Val(1), - "entity2": StrVal("jane"), - }, - }, - }, - Project: "driver_project", - }, - wantErr: false, - err: nil, - }, - { - name: "invalid_feature_name/wrong_format", - req: OnlineFeaturesRequest{ - Features: []string{"/fs1:feature1"}, - Entities: []Row{}, - Project: "my_project", - }, - wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureRef, "/fs1:feature1"), - }, - { - name: "invalid_feature_name", - req: OnlineFeaturesRequest{ - Features: []string{"feature1"}, - Entities: []Row{}, - Project: "my_project", - }, - wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureRef, "feature1"), - }, - } - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - got, err := tc.req.buildRequest() - - if (err != nil) != tc.wantErr { - t.Errorf("error = %v, wantErr %v", err, tc.wantErr) - return - } - - if tc.wantErr && err.Error() != tc.err.Error() { - t.Errorf("error = %v, expected err = %v", err, tc.err) - return - } - - if !proto.Equal(got, tc.want) { - m := json.Marshaler{} - gotJSON, _ := m.MarshalToString(got) - wantJSON, _ := m.MarshalToString(tc.want) - t.Errorf("got: \n%v\nwant:\n%v", gotJSON, wantJSON) - } - }) - } -} diff --git a/sdk/go/response.go b/sdk/go/response.go deleted file mode 100644 index 7fa50761b6..0000000000 --- a/sdk/go/response.go +++ /dev/null @@ -1,96 +0,0 @@ -package feast - -import ( - "fmt" - "github.com/feast-dev/feast/sdk/go/protos/feast/serving" - "github.com/feast-dev/feast/sdk/go/protos/feast/types" -) - -var ( - // ErrLengthMismatch indicates that the number of values returned is not the same as the number of values requested - ErrLengthMismatch = "Length mismatch; number of na values (%d) not equal to number of features requested (%d)." - - // ErrFeatureNotFound indicates that a requested feature was not found in the response - ErrFeatureNotFound = "Feature %s not found in response." - - // ErrTypeMismatch indicates that there was a type mismatch in the returned values - ErrTypeMismatch = "Requested output of type %s does not match type of feature value returned." -) - -// OnlineFeaturesResponse is a wrapper around serving.GetOnlineFeaturesResponse. -type OnlineFeaturesResponse struct { - RawResponse *serving.GetOnlineFeaturesResponse -} - -// Rows retrieves the result of the request as a list of Rows. -func (r OnlineFeaturesResponse) Rows() []Row { - rows := make([]Row, len(r.RawResponse.FieldValues)) - for i, fieldValues := range r.RawResponse.FieldValues { - rows[i] = fieldValues.Fields - } - return rows -} - -// Statuses retrieves field level status metadata for each row in Rows(). -// Each status map returned maps status 1:1 to each returned row from Rows() -func (r OnlineFeaturesResponse) Statuses() []map[string]serving.GetOnlineFeaturesResponse_FieldStatus { - statuses := make([]map[string]serving.GetOnlineFeaturesResponse_FieldStatus, len(r.RawResponse.FieldValues)) - for i, fieldValues := range r.RawResponse.FieldValues { - statuses[i] = fieldValues.Statuses - } - return statuses -} -
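request.go, response.go, and types.go together formed the public surface of the legacy Go SDK that this change removes. For context, a short end-to-end sketch of how these pieces fit together; it assumes the NewGrpcClient constructor and the GetOnlineFeatures/Close methods from sdk/go/client.go, which are not shown in this diff, so treat it as an illustration of the old API rather than a verbatim excerpt:

package main

import (
	"context"
	"fmt"

	feast "github.com/feast-dev/feast/sdk/go"
)

func main() {
	// Host and port are placeholders for a running Feast serving instance.
	client, err := feast.NewGrpcClient("localhost", 6566)
	if err != nil {
		panic(err)
	}
	defer client.Close()

	// Feature references use the featureTableName:featureName format that
	// parseFeatureRef validates; entities are Row maps built with the typed
	// value constructors from types.go.
	req := feast.OnlineFeaturesRequest{
		Features: []string{"driver:trips_today"},
		Entities: []feast.Row{{"driver_id": feast.Int64Val(1001)}},
		Project:  "driver_project",
	}

	resp, err := client.GetOnlineFeatures(context.Background(), &req)
	if err != nil {
		panic(err)
	}

	// Missing values come back as -1 via the fillNa argument of Int64Arrays.
	rows, err := resp.Int64Arrays([]string{"driver:trips_today"}, []int64{-1})
	if err != nil {
		panic(err)
	}
	fmt.Println(rows)
}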
-// Int64Arrays retrieves the result of the request as a list of int64 slices. Any missing values will be filled -// with the corresponding fillNa values provided. -func (r OnlineFeaturesResponse) Int64Arrays(order []string, fillNa []int64) ([][]int64, error) { - rows := make([][]int64, len(r.RawResponse.FieldValues)) - if len(fillNa) != len(order) { - return nil, fmt.Errorf(ErrLengthMismatch, len(fillNa), len(order)) - } - for i, fieldValues := range r.RawResponse.FieldValues { - rows[i] = make([]int64, len(order)) - for j, fname := range order { - value, exists := fieldValues.Fields[fname] - if !exists { - return nil, fmt.Errorf(ErrFeatureNotFound, fname) - } - valType := value.GetVal() - if valType == nil { - rows[i][j] = fillNa[j] - } else if int64Val, ok := valType.(*types.Value_Int64Val); ok { - rows[i][j] = int64Val.Int64Val - } else { - return nil, fmt.Errorf(ErrTypeMismatch, "int64") - } - } - } - return rows, nil -} - -// Float64Arrays retrieves the result of the request as a list of float64 slices. Any missing values will be filled -// with the corresponding fillNa values provided. -func (r OnlineFeaturesResponse) Float64Arrays(order []string, fillNa []float64) ([][]float64, error) { - rows := make([][]float64, len(r.RawResponse.FieldValues)) - if len(fillNa) != len(order) { - return nil, fmt.Errorf(ErrLengthMismatch, len(fillNa), len(order)) - } - for i, records := range r.RawResponse.FieldValues { - rows[i] = make([]float64, len(order)) - for j, fname := range order { - value, exists := records.Fields[fname] - if !exists { - return nil, fmt.Errorf(ErrFeatureNotFound, fname) - } - valType := value.GetVal() - if valType == nil { - rows[i][j] = fillNa[j] - } else if doubleVal, ok := valType.(*types.Value_DoubleVal); ok { - rows[i][j] = doubleVal.DoubleVal - } else { - return nil, fmt.Errorf(ErrTypeMismatch, "float64") - } - } - } - return rows, nil -} diff --git a/sdk/go/response_test.go b/sdk/go/response_test.go deleted file mode 100644 index a617652745..0000000000 --- a/sdk/go/response_test.go +++ /dev/null @@ -1,134 +0,0 @@ -package feast - -import ( - "fmt" - "github.com/feast-dev/feast/sdk/go/protos/feast/serving" - "github.com/feast-dev/feast/sdk/go/protos/feast/types" - "github.com/google/go-cmp/cmp" - "testing" -) - -var response = OnlineFeaturesResponse{ - RawResponse: &serving.GetOnlineFeaturesResponse{ - FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{ - { - Fields: map[string]*types.Value{ - "featuretable1:feature1": Int64Val(1), - "featuretable1:feature2": {}, - }, - Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{ - "featuretable1:feature1": serving.GetOnlineFeaturesResponse_PRESENT, - "featuretable1:feature2": serving.GetOnlineFeaturesResponse_NULL_VALUE, - }, - }, - { - Fields: map[string]*types.Value{ - "featuretable1:feature1": Int64Val(2), - "featuretable1:feature2": Int64Val(2), - }, - Statuses: map[string]serving.GetOnlineFeaturesResponse_FieldStatus{ - "featuretable1:feature1": serving.GetOnlineFeaturesResponse_PRESENT, - "featuretable1:feature2": serving.GetOnlineFeaturesResponse_PRESENT, - }, - }, - }, - }, -} - -func TestOnlineFeaturesResponseToRow(t *testing.T) { - actual := response.Rows() - expected := []Row{ - {"featuretable1:feature1": Int64Val(1), "featuretable1:feature2": &types.Value{}}, - {"featuretable1:feature1": Int64Val(2), "featuretable1:feature2": Int64Val(2)}, - } - if len(expected) != len(actual) { - t.Errorf("expected: %v, got: %v", expected, actual) - } - for i := range expected { - if !expected[i].equalTo(actual[i]) { - t.Errorf("expected: %v, got: %v", expected, actual) - } - } -} -
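The fixture above is deliberately built so that Rows() and Statuses() stay index-aligned: row i of Rows() is described by map i of Statuses(). A small sketch of how a caller would combine the two accessors; it assumes the response fixture and package context above plus an "fmt" import:

// printPresent walks Rows() and Statuses() in lockstep; a value should only
// be trusted when its field-level status is PRESENT.
func printPresent(r OnlineFeaturesResponse) {
	rows := r.Rows()
	statuses := r.Statuses()
	for i := range rows {
		for name, val := range rows[i] {
			if statuses[i][name] != serving.GetOnlineFeaturesResponse_PRESENT {
				fmt.Printf("row %d: %s is missing or null\n", i, name)
				continue
			}
			fmt.Printf("row %d: %s = %v\n", i, name, val)
		}
	}
}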
-func TestOnlineFeaturesResponseToStatuses(t *testing.T) { - actual := response.Statuses() - expected := []map[string]serving.GetOnlineFeaturesResponse_FieldStatus{ - { - "featuretable1:feature1": serving.GetOnlineFeaturesResponse_PRESENT, - "featuretable1:feature2": serving.GetOnlineFeaturesResponse_NULL_VALUE, - }, - { - "featuretable1:feature1": serving.GetOnlineFeaturesResponse_PRESENT, - "featuretable1:feature2": serving.GetOnlineFeaturesResponse_PRESENT, - }, - } - if len(expected) != len(actual) { - t.Errorf("expected: %v, got: %v", expected, actual) - } - for i := range expected { - if !cmp.Equal(expected[i], actual[i]) { - t.Errorf("expected: %v, got: %v", expected, actual) - } - } -} - -func TestOnlineFeaturesResponseToInt64Array(t *testing.T) { - type args struct { - order []string - fillNa []int64 - } - tt := []struct { - name string - args args - want [][]int64 - wantErr bool - err error - }{ - { - name: "valid", - args: args{ - order: []string{"featuretable1:feature2", "featuretable1:feature1"}, - fillNa: []int64{-1, -1}, - }, - want: [][]int64{{-1, 1}, {2, 2}}, - wantErr: false, - }, - { - name: "length mismatch", - args: args{ - order: []string{"ft:feature2", "ft:feature1"}, - fillNa: []int64{-1}, - }, - want: nil, - wantErr: true, - err: fmt.Errorf(ErrLengthMismatch, 1, 2), - }, - { - name: "feature not found", - args: args{ - order: []string{"featuretable1:feature2", "featuretable1:feature3"}, - fillNa: []int64{-1, -1}, - }, - want: nil, - wantErr: true, - err: fmt.Errorf(ErrFeatureNotFound, "featuretable1:feature3"), - }, - } - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - got, err := response.Int64Arrays(tc.args.order, tc.args.fillNa) - if (err != nil) != tc.wantErr { - t.Errorf("error = %v, wantErr %v", err, tc.wantErr) - return - } - if tc.wantErr && err.Error() != tc.err.Error() { - t.Errorf("error = %v, expected err = %v", err, tc.err) - return - } - if !cmp.Equal(got, tc.want) { - t.Errorf("got: \n%v\nwant:\n%v", got, tc.want) - } - }) - } -} diff --git a/sdk/go/types.go b/sdk/go/types.go deleted file mode 100644 index 600af0b658..0000000000 --- a/sdk/go/types.go +++ /dev/null @@ -1,57 +0,0 @@ -package feast - -import ( - "github.com/feast-dev/feast/sdk/go/protos/feast/types" - "github.com/golang/protobuf/proto" -) - -// Row is a map of fields -type Row map[string]*types.Value - -func (r Row) equalTo(other Row) bool { - for k, v := range r { - if otherV, ok := other[k]; !ok { - return false - } else { - if !proto.Equal(v, otherV) { - return false - } - } - } - return true -} - -// StrVal is a string type feast value -func StrVal(val string) *types.Value { - return &types.Value{Val: &types.Value_StringVal{StringVal: val}} -} - -// Int32Val is an int32 type feast value -func Int32Val(val int32) *types.Value { - return &types.Value{Val: &types.Value_Int32Val{Int32Val: val}} -} - -// Int64Val is an int64 type feast value -func Int64Val(val int64) *types.Value { - return &types.Value{Val: &types.Value_Int64Val{Int64Val: val}} -} - -// FloatVal is a float32 type feast value -func FloatVal(val float32) *types.Value { - return &types.Value{Val: &types.Value_FloatVal{FloatVal: val}} -} - -// DoubleVal is a float64 type feast value -func DoubleVal(val float64) *types.Value { - return &types.Value{Val: &types.Value_DoubleVal{DoubleVal: val}} -} - -// BoolVal is a bool type feast value -func BoolVal(val bool) *types.Value { - return &types.Value{Val: &types.Value_BoolVal{BoolVal: val}} -} - -// BytesVal is a bytes type feast value -func BytesVal(val []byte) *types.Value { - return &types.Value{Val: &types.Value_BytesVal{BytesVal: val}} -} diff --git 
a/sdk/python/MANIFEST.in b/sdk/python/MANIFEST.in index 2852924ea9..0eeaa181b2 100644 --- a/sdk/python/MANIFEST.in +++ b/sdk/python/MANIFEST.in @@ -1 +1,4 @@ -recursive-include feast/protos/ *.py \ No newline at end of file +recursive-include feast/protos/ *.py +recursive-include feast py.typed *.pyi + +recursive-include feast/embedded_go/lib/ *.py *.so diff --git a/sdk/python/docs/index.rst b/sdk/python/docs/index.rst index b8dfb6e914..52783b40e3 100644 --- a/sdk/python/docs/index.rst +++ b/sdk/python/docs/index.rst @@ -1,5 +1,5 @@ Feast Python API Documentation -============================= +============================== Feature Store @@ -7,6 +7,8 @@ Feature Store .. automodule:: feast.feature_store :members: + :undoc-members: + :show-inheritance: Config ================== @@ -22,6 +24,48 @@ Data Source :members: :exclude-members: KafkaOptions, KafkaSource, KinesisOptions, KinesisSource +BigQuery Source +------------------ + +.. automodule:: feast.infra.offline_stores.bigquery_source + :members: + :exclude-members: BigQueryOptions + +Redshift Source +------------------ + +.. automodule:: feast.infra.offline_stores.redshift_source + :members: + :exclude-members: RedshiftOptions + +Snowflake Source +------------------ + +.. automodule:: feast.infra.offline_stores.snowflake_source + :members: + :exclude-members: SnowflakeOptions + +Spark Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark_source + :members: + :exclude-members: SparkOptions + +Trino Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino_source + :members: + :exclude-members: TrinoOptions + + +File Source +------------------ + +.. automodule:: feast.infra.offline_stores.file_source + :members: + :exclude-members: FileOptions Entity ================== @@ -30,16 +74,153 @@ Entity :inherited-members: :members: - Feature View ================== .. automodule:: feast.feature_view :members: +On Demand Feature View +====================== + +.. automodule:: feast.on_demand_feature_view + :members: + Feature ================== .. automodule:: feast.feature :inherited-members: :members: + +Feature Service +================== + +.. automodule:: feast.feature_service + :inherited-members: + :members: + +Registry +================== + +.. automodule:: feast.registry + :inherited-members: + :members: + +Registry Store +================== + +.. automodule:: feast.registry_store + :inherited-members: + :members: + :exclude-members: NoopRegistryStore + + +Provider +================== + +.. automodule:: feast.infra.provider + :inherited-members: + :members: + +Passthrough Provider +-------------------- + +.. automodule:: feast.infra.passthrough_provider + :members: + +Local Provider +------------------ + +.. automodule:: feast.infra.local + :members: + :exclude-members: LocalRegistryStore + +GCP Provider +------------------ + +.. automodule:: feast.infra.gcp + :members: + :exclude-members: GCSRegistryStore + +AWS Provider +------------------ + +.. automodule:: feast.infra.aws + :members: + :exclude-members: S3RegistryStore + +Offline Store +================== + +.. automodule:: feast.infra.offline_stores.offline_store + :members: + +File Offline Store +------------------ + +.. automodule:: feast.infra.offline_stores.file + :members: + +BigQuery Offline Store +---------------------- + +.. automodule:: feast.infra.offline_stores.bigquery + :members: + +Redshift Offline Store +---------------------- + +.. 
automodule:: feast.infra.offline_stores.redshift + :members: + +Snowflake Offline Store +----------------------- + +.. automodule:: feast.infra.offline_stores.snowflake + :members: + +Spark Offline Store +------------------- + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark + :members: + +Trino Offline Store +------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino + :members: + + +Online Store +================== + +.. automodule:: feast.infra.online_stores.online_store + :inherited-members: + :members: + +Sqlite Online Store +------------------- + +.. automodule:: feast.infra.online_stores.sqlite + :members: + +Datastore Online Store +---------------------- + +.. automodule:: feast.infra.online_stores.datastore + :members: + +DynamoDB Online Store +--------------------- + +.. automodule:: feast.infra.online_stores.dynamodb + :members: + +Redis Online Store +------------------ + +.. automodule:: feast.infra.online_stores.redis + :members: + :noindex: \ No newline at end of file diff --git a/sdk/python/docs/source/feast.diff.rst b/sdk/python/docs/source/feast.diff.rst new file mode 100644 index 0000000000..e414217171 --- /dev/null +++ b/sdk/python/docs/source/feast.diff.rst @@ -0,0 +1,37 @@ +feast.diff package +================== + +Submodules +---------- + +feast.diff.infra\_diff module +----------------------------- + +.. automodule:: feast.diff.infra_diff + :members: + :undoc-members: + :show-inheritance: + +feast.diff.property\_diff module +-------------------------------- + +.. automodule:: feast.diff.property_diff + :members: + :undoc-members: + :show-inheritance: + +feast.diff.registry\_diff module +-------------------------------- + +.. automodule:: feast.diff.registry_diff + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.diff + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.dqm.profilers.rst b/sdk/python/docs/source/feast.dqm.profilers.rst new file mode 100644 index 0000000000..24f452ada8 --- /dev/null +++ b/sdk/python/docs/source/feast.dqm.profilers.rst @@ -0,0 +1,29 @@ +feast.dqm.profilers package +=========================== + +Submodules +---------- + +feast.dqm.profilers.ge\_profiler module +--------------------------------------- + +.. automodule:: feast.dqm.profilers.ge_profiler + :members: + :undoc-members: + :show-inheritance: + +feast.dqm.profilers.profiler module +----------------------------------- + +.. automodule:: feast.dqm.profilers.profiler + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.dqm.profilers + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.dqm.rst b/sdk/python/docs/source/feast.dqm.rst new file mode 100644 index 0000000000..0c1b82f0fa --- /dev/null +++ b/sdk/python/docs/source/feast.dqm.rst @@ -0,0 +1,29 @@ +feast.dqm package +================= + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.dqm.profilers + +Submodules +---------- + +feast.dqm.errors module +----------------------- + +.. automodule:: feast.dqm.errors + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.dqm + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst new file mode 100644 index 0000000000..4fabad6844 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst @@ -0,0 +1,30 @@ +feast.infra.offline\_stores.contrib package +=========================================== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.infra.offline_stores.contrib.spark_offline_store + feast.infra.offline_stores.contrib.trino_offline_store + +Submodules +---------- + +feast.infra.offline\_stores.contrib.contrib\_repo\_configuration module +----------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.contrib_repo_configuration + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.offline_stores.contrib + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.spark_offline_store.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.spark_offline_store.rst new file mode 100644 index 0000000000..d91389faab --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.spark_offline_store.rst @@ -0,0 +1,29 @@ +feast.infra.offline\_stores.contrib.spark\_offline\_store package +================================================================= + +Submodules +---------- + +feast.infra.offline\_stores.contrib.spark\_offline\_store.spark module +---------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.contrib.spark\_offline\_store.spark\_source module +------------------------------------------------------------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark_source + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.connectors.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.connectors.rst new file mode 100644 index 0000000000..a0ee8dceab --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.connectors.rst @@ -0,0 +1,21 @@ +feast.infra.offline\_stores.contrib.trino\_offline\_store.connectors package +============================================================================ + +Submodules +---------- + +feast.infra.offline\_stores.contrib.trino\_offline\_store.connectors.upload module +---------------------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.connectors.upload + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.infra.offline_stores.contrib.trino_offline_store.connectors + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.rst new file mode 100644 index 0000000000..0fe9cdc461 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.rst @@ -0,0 +1,54 @@ +feast.infra.offline\_stores.contrib.trino\_offline\_store package +================================================================= + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.infra.offline_stores.contrib.trino_offline_store.connectors + feast.infra.offline_stores.contrib.trino_offline_store.test_config + +Submodules +---------- + +feast.infra.offline\_stores.contrib.trino\_offline\_store.trino module +---------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.contrib.trino\_offline\_store.trino\_queries module +------------------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino_queries + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.contrib.trino\_offline\_store.trino\_source module +------------------------------------------------------------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino_source + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.contrib.trino\_offline\_store.trino\_type\_map module +--------------------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.test_config.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.test_config.rst new file mode 100644 index 0000000000..ef43a191d0 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.trino_offline_store.test_config.rst @@ -0,0 +1,21 @@ +feast.infra.offline\_stores.contrib.trino\_offline\_store.test\_config package +============================================================================== + +Submodules +---------- + +feast.infra.offline\_stores.contrib.trino\_offline\_store.test\_config.manual\_tests module +------------------------------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.infra.offline_stores.contrib.trino_offline_store.test_config + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.rst b/sdk/python/docs/source/feast.infra.offline_stores.rst index a40c8b115e..7949c9efb3 100644 --- a/sdk/python/docs/source/feast.infra.offline_stores.rst +++ b/sdk/python/docs/source/feast.infra.offline_stores.rst @@ -1,6 +1,14 @@ feast.infra.offline\_stores package =================================== +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.infra.offline_stores.contrib + Submodules ---------- @@ -12,6 +20,14 @@ feast.infra.offline\_stores.bigquery module :undoc-members: :show-inheritance: +feast.infra.offline\_stores.bigquery\_source module +--------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.bigquery_source + :members: + :undoc-members: + :show-inheritance: + feast.infra.offline\_stores.file module --------------------------------------- @@ -20,10 +36,10 @@ feast.infra.offline\_stores.file module :undoc-members: :show-inheritance: -feast.infra.offline\_stores.helpers module ------------------------------------------- +feast.infra.offline\_stores.file\_source module +----------------------------------------------- -.. automodule:: feast.infra.offline_stores.helpers +.. automodule:: feast.infra.offline_stores.file_source :members: :undoc-members: :show-inheritance: @@ -36,6 +52,46 @@ feast.infra.offline\_stores.offline\_store module :undoc-members: :show-inheritance: +feast.infra.offline\_stores.offline\_utils module +------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.offline_utils + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.redshift module +------------------------------------------- + +.. automodule:: feast.infra.offline_stores.redshift + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.redshift\_source module +--------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.redshift_source + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.snowflake module +-------------------------------------------- + +.. automodule:: feast.infra.offline_stores.snowflake + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.snowflake\_source module +---------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.snowflake_source + :members: + :undoc-members: + :show-inheritance: + Module contents --------------- diff --git a/sdk/python/docs/source/feast.infra.online_stores.rst b/sdk/python/docs/source/feast.infra.online_stores.rst new file mode 100644 index 0000000000..5c23796bf8 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.online_stores.rst @@ -0,0 +1,61 @@ +feast.infra.online\_stores package +================================== + +Submodules +---------- + +feast.infra.online\_stores.datastore module +------------------------------------------- + +.. automodule:: feast.infra.online_stores.datastore + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.dynamodb module +------------------------------------------ + +.. automodule:: feast.infra.online_stores.dynamodb + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.helpers module +----------------------------------------- + +.. 
automodule:: feast.infra.online_stores.helpers + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.online\_store module +----------------------------------------------- + +.. automodule:: feast.infra.online_stores.online_store + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.redis module +--------------------------------------- + +.. automodule:: feast.infra.online_stores.redis + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.sqlite module +---------------------------------------- + +.. automodule:: feast.infra.online_stores.sqlite + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.online_stores + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.rst b/sdk/python/docs/source/feast.infra.rst index 63dcb737ec..5473c3927e 100644 --- a/sdk/python/docs/source/feast.infra.rst +++ b/sdk/python/docs/source/feast.infra.rst @@ -8,10 +8,20 @@ Subpackages :maxdepth: 4 feast.infra.offline_stores + feast.infra.online_stores + feast.infra.utils Submodules ---------- +feast.infra.aws module +---------------------- + +.. automodule:: feast.infra.aws + :members: + :undoc-members: + :show-inheritance: + feast.infra.gcp module ---------------------- @@ -20,6 +30,14 @@ feast.infra.gcp module :undoc-members: :show-inheritance: +feast.infra.infra\_object module +-------------------------------- + +.. automodule:: feast.infra.infra_object + :members: + :undoc-members: + :show-inheritance: + feast.infra.key\_encoding\_utils module --------------------------------------- @@ -36,6 +54,14 @@ feast.infra.local module :undoc-members: :show-inheritance: +feast.infra.passthrough\_provider module +---------------------------------------- + +.. automodule:: feast.infra.passthrough_provider + :members: + :undoc-members: + :show-inheritance: + feast.infra.provider module --------------------------- diff --git a/sdk/python/docs/source/feast.infra.utils.rst b/sdk/python/docs/source/feast.infra.utils.rst new file mode 100644 index 0000000000..9655acc206 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.utils.rst @@ -0,0 +1,29 @@ +feast.infra.utils package +========================= + +Submodules +---------- + +feast.infra.utils.aws\_utils module +----------------------------------- + +.. automodule:: feast.infra.utils.aws_utils + :members: + :undoc-members: + :show-inheritance: + +feast.infra.utils.snowflake\_utils module +----------------------------------------- + +.. automodule:: feast.infra.utils.snowflake_utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.loaders.rst b/sdk/python/docs/source/feast.loaders.rst index da3cbc13ad..d4968a2999 100644 --- a/sdk/python/docs/source/feast.loaders.rst +++ b/sdk/python/docs/source/feast.loaders.rst @@ -4,30 +4,6 @@ feast.loaders package Submodules ---------- -feast.loaders.abstract\_producer module ---------------------------------------- - -.. automodule:: feast.loaders.abstract_producer - :members: - :undoc-members: - :show-inheritance: - -feast.loaders.file module -------------------------- - -.. automodule:: feast.loaders.file - :members: - :undoc-members: - :show-inheritance: - -feast.loaders.ingest module ---------------------------- - -.. 
automodule:: feast.loaders.ingest - :members: - :undoc-members: - :show-inheritance: - feast.loaders.yaml module ------------------------- diff --git a/sdk/python/docs/source/feast.protos.feast.core.rst b/sdk/python/docs/source/feast.protos.feast.core.rst index ce25dda21f..f29a51719c 100644 --- a/sdk/python/docs/source/feast.protos.feast.core.rst +++ b/sdk/python/docs/source/feast.protos.feast.core.rst @@ -4,22 +4,6 @@ feast.protos.feast.core package Submodules ---------- -feast.protos.feast.core.CoreService\_pb2 module ------------------------------------------------ - -.. automodule:: feast.protos.feast.core.CoreService_pb2 - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.core.CoreService\_pb2\_grpc module ------------------------------------------------------ - -.. automodule:: feast.protos.feast.core.CoreService_pb2_grpc - :members: - :undoc-members: - :show-inheritance: - feast.protos.feast.core.DataFormat\_pb2 module ---------------------------------------------- @@ -52,6 +36,38 @@ feast.protos.feast.core.DataSource\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.DatastoreTable\_pb2 module +-------------------------------------------------- + +.. automodule:: feast.protos.feast.core.DatastoreTable_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.DatastoreTable\_pb2\_grpc module +-------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.DatastoreTable_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.DynamoDBTable\_pb2 module +------------------------------------------------- + +.. automodule:: feast.protos.feast.core.DynamoDBTable_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.DynamoDBTable\_pb2\_grpc module +------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.DynamoDBTable_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.Entity\_pb2 module ------------------------------------------ @@ -68,6 +84,22 @@ feast.protos.feast.core.Entity\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.FeatureService\_pb2 module +-------------------------------------------------- + +.. automodule:: feast.protos.feast.core.FeatureService_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.FeatureService\_pb2\_grpc module +-------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.FeatureService_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.FeatureTable\_pb2 module ------------------------------------------------ @@ -84,6 +116,22 @@ feast.protos.feast.core.FeatureTable\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.FeatureViewProjection\_pb2 module +--------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.FeatureViewProjection_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.FeatureViewProjection\_pb2\_grpc module +--------------------------------------------------------------- + +.. 
automodule:: feast.protos.feast.core.FeatureViewProjection_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.FeatureView\_pb2 module ----------------------------------------------- @@ -116,6 +164,38 @@ feast.protos.feast.core.Feature\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.InfraObject\_pb2 module +----------------------------------------------- + +.. automodule:: feast.protos.feast.core.InfraObject_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.InfraObject\_pb2\_grpc module +----------------------------------------------------- + +.. automodule:: feast.protos.feast.core.InfraObject_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.OnDemandFeatureView\_pb2 module +------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.OnDemandFeatureView_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.OnDemandFeatureView\_pb2\_grpc module +------------------------------------------------------------- + +.. automodule:: feast.protos.feast.core.OnDemandFeatureView_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.Registry\_pb2 module -------------------------------------------- @@ -132,6 +212,54 @@ feast.protos.feast.core.Registry\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.RequestFeatureView\_pb2 module +------------------------------------------------------ + +.. automodule:: feast.protos.feast.core.RequestFeatureView_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.RequestFeatureView\_pb2\_grpc module +------------------------------------------------------------ + +.. automodule:: feast.protos.feast.core.RequestFeatureView_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.SavedDataset\_pb2 module +------------------------------------------------ + +.. automodule:: feast.protos.feast.core.SavedDataset_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.SavedDataset\_pb2\_grpc module +------------------------------------------------------ + +.. automodule:: feast.protos.feast.core.SavedDataset_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.SqliteTable\_pb2 module +----------------------------------------------- + +.. automodule:: feast.protos.feast.core.SqliteTable_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.SqliteTable\_pb2\_grpc module +----------------------------------------------------- + +.. automodule:: feast.protos.feast.core.SqliteTable_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.Store\_pb2 module ----------------------------------------- @@ -148,6 +276,22 @@ feast.protos.feast.core.Store\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.ValidationProfile\_pb2 module +----------------------------------------------------- + +.. automodule:: feast.protos.feast.core.ValidationProfile_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.ValidationProfile\_pb2\_grpc module +----------------------------------------------------------- + +.. 
automodule:: feast.protos.feast.core.ValidationProfile_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + Module contents --------------- diff --git a/sdk/python/docs/source/feast.protos.feast.rst b/sdk/python/docs/source/feast.protos.feast.rst index f519165db8..456d960d73 100644 --- a/sdk/python/docs/source/feast.protos.feast.rst +++ b/sdk/python/docs/source/feast.protos.feast.rst @@ -10,7 +10,6 @@ Subpackages feast.protos.feast.core feast.protos.feast.serving feast.protos.feast.storage - feast.protos.feast.types Module contents --------------- diff --git a/sdk/python/docs/source/feast.protos.feast.serving.rst b/sdk/python/docs/source/feast.protos.feast.serving.rst index 06ee4e61f4..792335b189 100644 --- a/sdk/python/docs/source/feast.protos.feast.serving.rst +++ b/sdk/python/docs/source/feast.protos.feast.serving.rst @@ -4,6 +4,22 @@ feast.protos.feast.serving package Submodules ---------- +feast.protos.feast.serving.Connector\_pb2 module +------------------------------------------------ + +.. automodule:: feast.protos.feast.serving.Connector_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.serving.Connector\_pb2\_grpc module +------------------------------------------------------ + +.. automodule:: feast.protos.feast.serving.Connector_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.serving.ServingService\_pb2 module ----------------------------------------------------- @@ -20,6 +36,22 @@ feast.protos.feast.serving.ServingService\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.serving.TransformationService\_pb2 module +------------------------------------------------------------ + +.. automodule:: feast.protos.feast.serving.TransformationService_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.serving.TransformationService\_pb2\_grpc module +------------------------------------------------------------------ + +.. automodule:: feast.protos.feast.serving.TransformationService_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + Module contents --------------- diff --git a/sdk/python/docs/source/feast.protos.feast.types.rst b/sdk/python/docs/source/feast.protos.feast.types.rst deleted file mode 100644 index aeb31bc9ad..0000000000 --- a/sdk/python/docs/source/feast.protos.feast.types.rst +++ /dev/null @@ -1,61 +0,0 @@ -feast.protos.feast.types package -================================ - -Submodules ----------- - -feast.protos.feast.types.EntityKey\_pb2 module ----------------------------------------------- - -.. automodule:: feast.protos.feast.types.EntityKey_pb2 - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.types.EntityKey\_pb2\_grpc module ----------------------------------------------------- - -.. automodule:: feast.protos.feast.types.EntityKey_pb2_grpc - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.types.Field\_pb2 module ------------------------------------------- - -.. automodule:: feast.protos.feast.types.Field_pb2 - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.types.Field\_pb2\_grpc module ------------------------------------------------- - -.. automodule:: feast.protos.feast.types.Field_pb2_grpc - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.types.Value\_pb2 module ------------------------------------------- - -.. 
automodule:: feast.protos.feast.types.Value_pb2 - :members: - :undoc-members: - :show-inheritance: - -feast.protos.feast.types.Value\_pb2\_grpc module ------------------------------------------------- - -.. automodule:: feast.protos.feast.types.Value_pb2_grpc - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.protos.feast.types - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 3e1ee90603..35220913be 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -7,34 +7,27 @@ Subpackages .. toctree:: :maxdepth: 4 + feast.diff + feast.dqm feast.infra feast.loaders feast.protos - feast.staging Submodules ---------- -feast.cli module ----------------- - -.. automodule:: feast.cli - :members: - :undoc-members: - :show-inheritance: +feast.base\_feature\_view module +-------------------------------- -feast.client module -------------------- - -.. automodule:: feast.client +.. automodule:: feast.base_feature_view :members: :undoc-members: :show-inheritance: -feast.config module -------------------- +feast.cli module +---------------- -.. automodule:: feast.config +.. automodule:: feast.cli :members: :undoc-members: :show-inheritance: @@ -87,6 +80,14 @@ feast.errors module :undoc-members: :show-inheritance: +feast.feast\_object module +-------------------------- + +.. automodule:: feast.feast_object + :members: + :undoc-members: + :show-inheritance: + feast.feature module -------------------- @@ -95,18 +96,26 @@ feast.feature module :undoc-members: :show-inheritance: -feast.feature\_store module ---------------------------- +feast.feature\_server module +---------------------------- -.. automodule:: feast.feature_store +.. automodule:: feast.feature_server + :members: + :undoc-members: + :show-inheritance: + +feast.feature\_service module +----------------------------- + +.. automodule:: feast.feature_service :members: :undoc-members: :show-inheritance: -feast.feature\_table module +feast.feature\_store module --------------------------- -.. automodule:: feast.feature_table +.. automodule:: feast.feature_store :members: :undoc-members: :show-inheritance: @@ -119,6 +128,54 @@ feast.feature\_view module :undoc-members: :show-inheritance: +feast.feature\_view\_projection module +-------------------------------------- + +.. automodule:: feast.feature_view_projection + :members: + :undoc-members: + :show-inheritance: + +feast.field module +------------------ + +.. automodule:: feast.field + :members: + :undoc-members: + :show-inheritance: + +feast.flags module +------------------ + +.. automodule:: feast.flags + :members: + :undoc-members: + :show-inheritance: + +feast.flags\_helper module +-------------------------- + +.. automodule:: feast.flags_helper + :members: + :undoc-members: + :show-inheritance: + +feast.importer module +--------------------- + +.. automodule:: feast.importer + :members: + :undoc-members: + :show-inheritance: + +feast.inference module +---------------------- + +.. automodule:: feast.inference + :members: + :undoc-members: + :show-inheritance: + feast.names module ------------------ @@ -127,6 +184,14 @@ feast.names module :undoc-members: :show-inheritance: +feast.on\_demand\_feature\_view module +-------------------------------------- + +.. 
automodule:: feast.on_demand_feature_view + :members: + :undoc-members: + :show-inheritance: + feast.online\_response module ----------------------------- @@ -135,6 +200,14 @@ feast.online\_response module :undoc-members: :show-inheritance: +feast.proto\_json module +------------------------ + +.. automodule:: feast.proto_json + :members: + :undoc-members: + :show-inheritance: + feast.registry module --------------------- @@ -143,6 +216,14 @@ feast.registry module :undoc-members: :show-inheritance: +feast.registry\_store module +---------------------------- + +.. automodule:: feast.registry_store + :members: + :undoc-members: + :show-inheritance: + feast.repo\_config module ------------------------- @@ -151,6 +232,14 @@ feast.repo\_config module :undoc-members: :show-inheritance: +feast.repo\_contents module +--------------------------- + +.. automodule:: feast.repo_contents + :members: + :undoc-members: + :show-inheritance: + feast.repo\_operations module ----------------------------- @@ -159,10 +248,26 @@ feast.repo\_operations module :undoc-members: :show-inheritance: -feast.telemetry module ----------------------- +feast.request\_feature\_view module +----------------------------------- + +.. automodule:: feast.request_feature_view + :members: + :undoc-members: + :show-inheritance: + +feast.saved\_dataset module +--------------------------- -.. automodule:: feast.telemetry +.. automodule:: feast.saved_dataset + :members: + :undoc-members: + :show-inheritance: + +feast.transformation\_server module +----------------------------------- + +.. automodule:: feast.transformation_server :members: :undoc-members: :show-inheritance: @@ -175,6 +280,22 @@ feast.type\_map module :undoc-members: :show-inheritance: +feast.types module +------------------ + +.. automodule:: feast.types + :members: + :undoc-members: + :show-inheritance: + +feast.usage module +------------------ + +.. automodule:: feast.usage + :members: + :undoc-members: + :show-inheritance: + feast.utils module ------------------ diff --git a/sdk/python/docs/source/feast.staging.rst b/sdk/python/docs/source/feast.staging.rst deleted file mode 100644 index 5ad63281a6..0000000000 --- a/sdk/python/docs/source/feast.staging.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.staging package -===================== - -Submodules ----------- - -feast.staging.entities module ------------------------------ - -.. automodule:: feast.staging.entities - :members: - :undoc-members: - :show-inheritance: - -feast.staging.storage\_client module ------------------------------------- - -.. automodule:: feast.staging.storage_client - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.staging - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index 347f45ed73..52783b40e3 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -1,9 +1,9 @@ Feast Python API Documentation -============================= +============================== Feature Store ---------------------------- +================== .. automodule:: feast.feature_store :members: @@ -24,6 +24,48 @@ Data Source :members: :exclude-members: KafkaOptions, KafkaSource, KinesisOptions, KinesisSource +BigQuery Source +------------------ + +.. automodule:: feast.infra.offline_stores.bigquery_source + :members: + :exclude-members: BigQueryOptions + +Redshift Source +------------------ + +.. 
automodule:: feast.infra.offline_stores.redshift_source + :members: + :exclude-members: RedshiftOptions + +Snowflake Source +------------------ + +.. automodule:: feast.infra.offline_stores.snowflake_source + :members: + :exclude-members: SnowflakeOptions + +Spark Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark_source + :members: + :exclude-members: SparkOptions + +Trino Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino_source + :members: + :exclude-members: TrinoOptions + + +File Source +------------------ + +.. automodule:: feast.infra.offline_stores.file_source + :members: + :exclude-members: FileOptions Entity ================== @@ -32,16 +74,153 @@ Entity :inherited-members: :members: - Feature View ================== .. automodule:: feast.feature_view :members: +On Demand Feature View +====================== + +.. automodule:: feast.on_demand_feature_view + :members: + Feature ================== .. automodule:: feast.feature :inherited-members: :members: + +Feature Service +================== + +.. automodule:: feast.feature_service + :inherited-members: + :members: + +Registry +================== + +.. automodule:: feast.registry + :inherited-members: + :members: + +Registry Store +================== + +.. automodule:: feast.registry_store + :inherited-members: + :members: + :exclude-members: NoopRegistryStore + + +Provider +================== + +.. automodule:: feast.infra.provider + :inherited-members: + :members: + +Passthrough Provider +-------------------- + +.. automodule:: feast.infra.passthrough_provider + :members: + +Local Provider +------------------ + +.. automodule:: feast.infra.local + :members: + :exclude-members: LocalRegistryStore + +GCP Provider +------------------ + +.. automodule:: feast.infra.gcp + :members: + :exclude-members: GCSRegistryStore + +AWS Provider +------------------ + +.. automodule:: feast.infra.aws + :members: + :exclude-members: S3RegistryStore + +Offline Store +================== + +.. automodule:: feast.infra.offline_stores.offline_store + :members: + +File Offline Store +------------------ + +.. automodule:: feast.infra.offline_stores.file + :members: + +BigQuery Offline Store +---------------------- + +.. automodule:: feast.infra.offline_stores.bigquery + :members: + +Redshift Offline Store +---------------------- + +.. automodule:: feast.infra.offline_stores.redshift + :members: + +Snowflake Offline Store +----------------------- + +.. automodule:: feast.infra.offline_stores.snowflake + :members: + +Spark Offline Store +------------------- + +.. automodule:: feast.infra.offline_stores.contrib.spark_offline_store.spark + :members: + +Trino Offline Store +------------------- + +.. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino + :members: + + +Online Store +================== + +.. automodule:: feast.infra.online_stores.online_store + :inherited-members: + :members: + +Sqlite Online Store +------------------- + +.. automodule:: feast.infra.online_stores.sqlite + :members: + +Datastore Online Store +---------------------- + +.. automodule:: feast.infra.online_stores.datastore + :members: + +DynamoDB Online Store +--------------------- + +.. automodule:: feast.infra.online_stores.dynamodb + :members: + +Redis Online Store +------------------ + +.. 
automodule:: feast.infra.online_stores.redis + :members: + :noindex: \ No newline at end of file diff --git a/sdk/python/docs/source/modules.rst b/sdk/python/docs/source/modules.rst deleted file mode 100644 index 3a6f8333ab..0000000000 --- a/sdk/python/docs/source/modules.rst +++ /dev/null @@ -1,7 +0,0 @@ -feast -===== - -.. toctree:: - :maxdepth: 4 - - feast diff --git a/sdk/python/feast/.DS_Store b/sdk/python/feast/.DS_Store deleted file mode 100644 index aefa87c191..0000000000 Binary files a/sdk/python/feast/.DS_Store and /dev/null differ diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index 430fd9f715..5127e03b56 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -5,15 +5,24 @@ from feast.infra.offline_stores.bigquery_source import BigQuerySource from feast.infra.offline_stores.file_source import FileSource from feast.infra.offline_stores.redshift_source import RedshiftSource +from feast.infra.offline_stores.snowflake_source import SnowflakeSource -from .client import Client -from .data_source import KafkaSource, KinesisSource, SourceType +from .data_source import ( + KafkaSource, + KinesisSource, + PushSource, + RequestSource, + SourceType, +) from .entity import Entity from .feature import Feature +from .feature_service import FeatureService from .feature_store import FeatureStore -from .feature_table import FeatureTable from .feature_view import FeatureView +from .field import Field +from .on_demand_feature_view import OnDemandFeatureView from .repo_config import RepoConfig +from .request_feature_view import RequestFeatureView from .value_type import ValueType logging.basicConfig( @@ -29,18 +38,23 @@ pass __all__ = [ - "Client", "Entity", "KafkaSource", "KinesisSource", "Feature", + "Field", + "FeatureService", "FeatureStore", - "FeatureTable", "FeatureView", + "OnDemandFeatureView", "RepoConfig", "SourceType", "ValueType", "BigQuerySource", "FileSource", "RedshiftSource", + "RequestFeatureView", + "SnowflakeSource", + "PushSource", + "RequestSource", ] diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py new file mode 100644 index 0000000000..80b3b0cec8 --- /dev/null +++ b/sdk/python/feast/base_feature_view.py @@ -0,0 +1,223 @@ +# Copyright 2021 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Dict, List, Optional, Type + +from google.protobuf.json_format import MessageToJson +from proto import Message + +from feast.feature_view_projection import FeatureViewProjection +from feast.field import Field + + +class BaseFeatureView(ABC): + """ + A BaseFeatureView defines a logical group of features. + + Attributes: + name: The unique name of the base feature view. + features: The list of features defined as part of this base feature view. + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. 
+ owner: The owner of the base feature view, typically the email of the primary + maintainer. + projection: The feature view projection storing modifications to be applied to + this base feature view at retrieval time. + created_timestamp (optional): The time when the base feature view was created. + last_updated_timestamp (optional): The time when the base feature view was last + updated. + """ + + name: str + features: List[Field] + description: str + tags: Dict[str, str] + owner: str + projection: FeatureViewProjection + created_timestamp: Optional[datetime] + last_updated_timestamp: Optional[datetime] + + @abstractmethod + def __init__( + self, + *, + name: str, + features: Optional[List[Field]] = None, + description: str = "", + tags: Optional[Dict[str, str]] = None, + owner: str = "", + ): + """ + Creates a BaseFeatureView object. + + Args: + name: The unique name of the base feature view. + features (optional): The list of features defined as part of this base feature view. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the base feature view, typically the email of the + primary maintainer. + + Raises: + ValueError: A field mapping conflicts with an Entity or a Feature. + """ + assert name is not None + self.name = name + self.features = features or [] + self.description = description + self.tags = tags or {} + self.owner = owner + self.projection = FeatureViewProjection.from_definition(self) + self.created_timestamp = None + self.last_updated_timestamp = None + + @property + @abstractmethod + def proto_class(self) -> Type[Message]: + pass + + @abstractmethod + def to_proto(self) -> Message: + pass + + @classmethod + @abstractmethod + def from_proto(cls, feature_view_proto): + pass + + @abstractmethod + def __copy__(self): + """Returns a deep copy of this base feature view.""" + pass + + def __repr__(self): + items = (f"{k} = {v}" for k, v in self.__dict__.items()) + return f"<{self.__class__.__name__}({', '.join(items)})>" + + def __str__(self): + return str(MessageToJson(self.to_proto())) + + def __hash__(self): + return hash((self.name)) + + def __getitem__(self, item): + assert isinstance(item, list) + + referenced_features = [] + for feature in self.features: + if feature.name in item: + referenced_features.append(feature) + + cp = self.__copy__() + cp.projection.features = referenced_features + + return cp + + def __eq__(self, other): + if not isinstance(other, BaseFeatureView): + raise TypeError( + "Comparisons should only involve BaseFeatureView class objects." + ) + + if ( + self.name != other.name + or sorted(self.features) != sorted(other.features) + or self.projection != other.projection + or self.description != other.description + or self.tags != other.tags + or self.owner != other.owner + ): + return False + + return True + + def ensure_valid(self): + """ + Validates the state of this feature view locally. + + Raises: + ValueError: The feature view is invalid. + """ + if not self.name: + raise ValueError("Feature view needs a name.") + + def with_name(self, name: str): + """ + Returns a renamed copy of this base feature view. This renamed copy should only be + used for query operations and will not modify the underlying base feature view. + + Args: + name: The name to assign to the copy. 
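+
+        Returns:
+            A copy of this base feature view with the projection's
+            name alias set to the given name.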
+ """ + cp = self.__copy__() + cp.projection.name_alias = name + + return cp + + def set_projection(self, feature_view_projection: FeatureViewProjection) -> None: + """ + Sets the feature view projection of this base feature view to the given projection. + + Args: + feature_view_projection: The feature view projection to be set. + + Raises: + ValueError: The name or features of the projection do not match. + """ + if feature_view_projection.name != self.name: + raise ValueError( + f"The projection for the {self.name} FeatureView cannot be applied because it differs in name. " + f"The projection is named {feature_view_projection.name} and the name indicates which " + "FeatureView the projection is for." + ) + + for feature in feature_view_projection.features: + if feature not in self.features: + raise ValueError( + f"The projection for {self.name} cannot be applied because it contains {feature.name} which the " + "FeatureView doesn't have." + ) + + self.projection = feature_view_projection + + def with_projection(self, feature_view_projection: FeatureViewProjection): + """ + Returns a copy of this base feature view with the feature view projection set to + the given projection. + + Args: + feature_view_projection: The feature view projection to assign to the copy. + + Raises: + ValueError: The name or features of the projection do not match. + """ + if feature_view_projection.name != self.name: + raise ValueError( + f"The projection for the {self.name} FeatureView cannot be applied because it differs in name. " + f"The projection is named {feature_view_projection.name} and the name indicates which " + "FeatureView the projection is for." + ) + + for feature in feature_view_projection.features: + if feature not in self.features: + raise ValueError( + f"The projection for {self.name} cannot be applied because it contains {feature.name} which the " + "FeatureView doesn't have." + ) + + cp = self.__copy__() + cp.projection = feature_view_projection + + return cp diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index a1fe6cd795..80cd1844b6 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -13,29 +13,35 @@ # limitations under the License. import logging +import warnings from datetime import datetime from pathlib import Path -from typing import List +from typing import List, Optional import click import pkg_resources import yaml +from colorama import Fore, Style +from dateutil import parser -from feast import utils +from feast import flags, flags_helper, utils +from feast.constants import DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT from feast.errors import FeastObjectNotFoundException, FeastProviderLoginError from feast.feature_store import FeatureStore +from feast.feature_view import FeatureView +from feast.on_demand_feature_view import OnDemandFeatureView from feast.repo_config import load_repo_config from feast.repo_operations import ( apply_total, cli_check_repo, generate_project_name, init_repo, + plan, registry_dump, teardown, ) _logger = logging.getLogger(__name__) -DATETIME_ISO = "%Y-%m-%dT%H:%M:%s" class NoOptionDefaultFormat(click.Command): @@ -57,8 +63,13 @@ def format_options(self, ctx: click.Context, formatter: click.HelpFormatter): "-c", help="Switch to a different feature repository directory before executing the given subcommand.", ) +@click.option( + "--log-level", + default="info", + help="The logging level. 
One of DEBUG, INFO, WARNING, ERROR, and CRITICAL (case-insensitive).", +) @click.pass_context -def cli(ctx: click.Context, chdir: str): +def cli(ctx: click.Context, chdir: Optional[str], log_level: str): """ Feast CLI @@ -68,6 +79,25 @@ def cli(ctx: click.Context, chdir: str): """ ctx.ensure_object(dict) ctx.obj["CHDIR"] = Path.cwd() if chdir is None else Path(chdir).absolute() + try: + level = getattr(logging, log_level.upper()) + logging.basicConfig( + format="%(asctime)s %(levelname)s:%(message)s", + datefmt="%m/%d/%Y %I:%M:%S %p", + level=level, + ) + # Override the logging level for already created loggers (due to loggers being created at the import time) + # Note, that format & datefmt does not need to be set, because by default child loggers don't override them + + # Also note, that mypy complains that logging.root doesn't have "manager" because of the way it's written. + # So we have to put a type ignore hint for mypy. + for logger_name in logging.root.manager.loggerDict: # type: ignore + if "feast" in logger_name: + logger = logging.getLogger(logger_name) + logger.setLevel(level) + + except Exception as e: + raise e pass @@ -79,6 +109,84 @@ def version(): print(f'Feast SDK Version: "{pkg_resources.get_distribution("feast")}"') +@cli.command() +@click.pass_context +def endpoint(ctx: click.Context): + """ + Display feature server endpoints + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + endpoint = store.get_feature_server_endpoint() + if endpoint is not None: + _logger.info( + f"Feature server endpoint: {Style.BRIGHT + Fore.GREEN}{endpoint}{Style.RESET_ALL}" + ) + else: + _logger.info("There is no active feature server.") + + +@cli.group(name="data-sources") +def data_sources_cmd(): + """ + Access data sources + """ + pass + + +@data_sources_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def data_source_describe(ctx: click.Context, name: str): + """ + Describe a data source + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + + try: + data_source = store.get_data_source(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + warnings.warn( + "Describing data sources will only work properly if all data sources have names or table names specified. " + "Starting Feast 0.21, data source unique names will be required to encourage data source discovery.", + RuntimeWarning, + ) + print( + yaml.dump( + yaml.safe_load(str(data_source)), default_flow_style=False, sort_keys=False + ) + ) + + +@data_sources_cmd.command(name="list") +@click.pass_context +def data_source_list(ctx: click.Context): + """ + List all data sources + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + table = [] + for datasource in store.list_data_sources(): + table.append([datasource.name, datasource.__class__]) + + from tabulate import tabulate + + warnings.warn( + "Listing data sources will only work properly if all data sources have names or table names specified. 
" + "Starting Feast 0.21, data source unique names will be required to encourage data source discovery", + RuntimeWarning, + ) + print(tabulate(table, headers=["NAME", "CLASS"], tablefmt="plain")) + + @cli.group(name="entities") def entities_cmd(): """ @@ -129,6 +237,63 @@ def entity_list(ctx: click.Context): print(tabulate(table, headers=["NAME", "DESCRIPTION", "TYPE"], tablefmt="plain")) +@cli.group(name="feature-services") +def feature_services_cmd(): + """ + Access feature services + """ + pass + + +@feature_services_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def feature_service_describe(ctx: click.Context, name: str): + """ + Describe a feature service + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + + try: + feature_service = store.get_feature_service(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(feature_service)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@feature_services_cmd.command(name="list") +@click.pass_context +def feature_service_list(ctx: click.Context): + """ + List all feature services + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + feature_services = [] + for feature_service in store.list_feature_services(): + feature_names = [] + for projection in feature_service.feature_view_projections: + feature_names.extend( + [f"{projection.name}:{feature.name}" for feature in projection.features] + ) + feature_services.append([feature_service.name, ", ".join(feature_names)]) + + from tabulate import tabulate + + print(tabulate(feature_services, headers=["NAME", "FEATURES"], tablefmt="plain")) + + @cli.group(name="feature-views") def feature_views_cmd(): """ @@ -171,12 +336,100 @@ def feature_view_list(ctx: click.Context): cli_check_repo(repo) store = FeatureStore(repo_path=str(repo)) table = [] - for feature_view in store.list_feature_views(): - table.append([feature_view.name, feature_view.entities]) + for feature_view in [ + *store.list_feature_views(), + *store.list_request_feature_views(), + *store.list_on_demand_feature_views(), + ]: + entities = set() + if isinstance(feature_view, FeatureView): + entities.update(feature_view.entities) + elif isinstance(feature_view, OnDemandFeatureView): + for backing_fv in feature_view.source_feature_view_projections.values(): + entities.update(store.get_feature_view(backing_fv.name).entities) + table.append( + [ + feature_view.name, + entities if len(entities) > 0 else "n/a", + type(feature_view).__name__, + ] + ) from tabulate import tabulate - print(tabulate(table, headers=["NAME", "ENTITIES"], tablefmt="plain")) + print(tabulate(table, headers=["NAME", "ENTITIES", "TYPE"], tablefmt="plain")) + + +@cli.group(name="on-demand-feature-views") +def on_demand_feature_views_cmd(): + """ + [Experimental] Access on demand feature views + """ + pass + + +@on_demand_feature_views_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def on_demand_feature_view_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe an on demand feature view + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + + try: + on_demand_feature_view = store.get_on_demand_feature_view(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(on_demand_feature_view)), + 
default_flow_style=False, + sort_keys=False, + ) + ) + + +@on_demand_feature_views_cmd.command(name="list") +@click.pass_context +def on_demand_feature_view_list(ctx: click.Context): + """ + [Experimental] List all on demand feature views + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + table = [] + for on_demand_feature_view in store.list_on_demand_feature_views(): + table.append([on_demand_feature_view.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + +@cli.command("plan", cls=NoOptionDefaultFormat) +@click.option( + "--skip-source-validation", + is_flag=True, + help="Don't validate the data sources by checking for that the tables exist.", +) +@click.pass_context +def plan_command(ctx: click.Context, skip_source_validation: bool): + """ + Create or update a feature store deployment + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_config = load_repo_config(repo) + try: + plan(repo_config, repo, skip_source_validation) + except FeastProviderLoginError as e: + print(str(e)) @cli.command("apply", cls=NoOptionDefaultFormat) @@ -248,8 +501,8 @@ def materialize_command( store = FeatureStore(repo_path=str(repo)) store.materialize( feature_views=None if not views else views, - start_date=utils.make_tzaware(datetime.fromisoformat(start_ts)), - end_date=utils.make_tzaware(datetime.fromisoformat(end_ts)), + start_date=utils.make_tzaware(parser.parse(start_ts)), + end_date=utils.make_tzaware(parser.parse(end_ts)), ) @@ -285,7 +538,9 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List @click.option( "--template", "-t", - type=click.Choice(["local", "gcp", "aws"], case_sensitive=False), + type=click.Choice( + ["local", "gcp", "aws", "snowflake", "spark"], case_sensitive=False + ), help="Specify a template for the created project", default="local", ) @@ -300,5 +555,158 @@ def init_command(project_directory, minimal: bool, template: str): init_repo(project_directory, template) +@cli.command("serve") +@click.option( + "--host", + "-h", + type=click.STRING, + default="127.0.0.1", + help="Specify a host for the server [default: 127.0.0.1]", +) +@click.option( + "--port", + "-p", + type=click.INT, + default=6566, + help="Specify a port for the server [default: 6566]", +) +@click.option( + "--no-access-log", is_flag=True, help="Disable the Uvicorn access log.", +) +@click.pass_context +def serve_command(ctx: click.Context, host: str, port: int, no_access_log: bool): + """Start a feature server locally on a given port.""" + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + + store.serve(host, port, no_access_log) + + +@cli.command("serve_transformations") +@click.option( + "--port", + "-p", + type=click.INT, + default=DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT, + help="Specify a port for the server", +) +@click.pass_context +def serve_transformations_command(ctx: click.Context, port: int): + """[Experimental] Start a feature consumption server locally on a given port.""" + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + + store.serve_transformations(port) + + +@cli.group(name="alpha") +def alpha_cmd(): + """ + Access alpha features + """ + pass + + +@alpha_cmd.command("list") +@click.pass_context +def list_alpha_features(ctx: click.Context): + """ + Lists all alpha features + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_path = str(repo) + store = 
FeatureStore(repo_path=repo_path) + + flags_to_show = flags.FLAG_NAMES.copy() + flags_to_show.remove(flags.FLAG_ALPHA_FEATURES_NAME) + print("Alpha features:") + for flag in flags_to_show: + enabled_string = ( + "enabled" + if flags_helper.feature_flag_enabled(store.config, flag) + else "disabled" + ) + print(f"{flag}: {enabled_string}") + + +@alpha_cmd.command("enable-all") +@click.pass_context +def enable_alpha_features(ctx: click.Context): + """ + Enables all alpha features + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_path = str(repo) + store = FeatureStore(repo_path=repo_path) + + if store.config.flags is None: + store.config.flags = {} + for flag_name in flags.FLAG_NAMES: + store.config.flags[flag_name] = True + store.config.write_to_path(Path(repo_path)) + + +@alpha_cmd.command("enable") +@click.argument("name", type=click.STRING) +@click.pass_context +def enable_alpha_feature(ctx: click.Context, name: str): + """ + Enables an alpha feature + """ + if name not in flags.FLAG_NAMES: + raise ValueError(f"Flag name, {name}, not valid.") + + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_path = str(repo) + store = FeatureStore(repo_path=repo_path) + + if store.config.flags is None: + store.config.flags = {} + store.config.flags[flags.FLAG_ALPHA_FEATURES_NAME] = True + store.config.flags[name] = True + store.config.write_to_path(Path(repo_path)) + + +@alpha_cmd.command("disable") +@click.argument("name", type=click.STRING) +@click.pass_context +def disable_alpha_feature(ctx: click.Context, name: str): + """ + Disables an alpha feature + """ + if name not in flags.FLAG_NAMES: + raise ValueError(f"Flag name, {name}, not valid.") + + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_path = str(repo) + store = FeatureStore(repo_path=repo_path) + + if store.config.flags is None or name not in store.config.flags: + return + store.config.flags[name] = False + store.config.write_to_path(Path(repo_path)) + + +@alpha_cmd.command("disable-all") +@click.pass_context +def disable_alpha_features(ctx: click.Context): + """ + Disables all alpha features + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + repo_path = str(repo) + store = FeatureStore(repo_path=repo_path) + + store.config.flags = None + store.config.write_to_path(Path(repo_path)) + + if __name__ == "__main__": cli() diff --git a/sdk/python/feast/client.py b/sdk/python/feast/client.py deleted file mode 100644 index 52da77fd5f..0000000000 --- a/sdk/python/feast/client.py +++ /dev/null @@ -1,980 +0,0 @@ -# Copyright 2019 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
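The legacy `feast.client.Client` deleted below is superseded by the `FeatureStore` API. A minimal sketch of the equivalent materialize-and-serve workflow, assuming a feature repo in the current directory and an illustrative feature view `driver_hourly_stats` with a `conv_rate` feature and a `driver_id` join key:

    from datetime import datetime

    from feast import FeatureStore

    store = FeatureStore(repo_path=".")

    # Load the latest feature values into the online store
    # (replaces the Client-era ingestion helpers).
    store.materialize_incremental(end_date=datetime.utcnow())

    # Low-latency retrieval, replacing Client.get_online_features().
    online_features = store.get_online_features(
        features=["driver_hourly_stats:conv_rate"],
        entity_rows=[{"driver_id": 1001}],
    ).to_dict()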
-import logging -import multiprocessing -import shutil -import warnings -from typing import Any, Dict, List, Optional, Union - -import grpc -import pandas as pd - -from feast import BigQuerySource, FileSource -from feast.config import Config -from feast.constants import ConfigOptions as opt -from feast.data_format import ParquetFormat -from feast.entity import Entity -from feast.feature import Feature, FeatureRef, _build_feature_references -from feast.feature_table import FeatureTable -from feast.grpc import auth as feast_auth -from feast.grpc.grpc import create_grpc_channel -from feast.loaders.ingest import ( - _check_field_mappings, - _read_table_from_source, - _upload_to_bq_source, - _upload_to_file_source, - _write_non_partitioned_table_from_source, - _write_partitioned_table_from_source, -) -from feast.online_response import OnlineResponse, _infer_online_entity_rows -from feast.protos.feast.core.CoreService_pb2 import ( - ApplyEntityRequest, - ApplyFeatureTableRequest, - ArchiveProjectRequest, - CreateProjectRequest, - DeleteFeatureTableRequest, - GetEntityRequest, - GetFeastCoreVersionRequest, - GetFeatureTableRequest, - ListEntitiesRequest, - ListFeaturesRequest, - ListFeatureTablesRequest, - ListProjectsRequest, -) -from feast.protos.feast.core.CoreService_pb2_grpc import CoreServiceStub -from feast.protos.feast.serving.ServingService_pb2 import ( - GetFeastServingInfoRequest, - GetOnlineFeaturesRequestV2, -) -from feast.protos.feast.serving.ServingService_pb2_grpc import ServingServiceStub -from feast.registry import Registry -from feast.usage import Usage - -_logger = logging.getLogger(__name__) - -CPU_COUNT: int = multiprocessing.cpu_count() - -warnings.simplefilter("once", DeprecationWarning) - - -class Client: - """ - Feast Client: Used for creating, managing, and retrieving features. - """ - - def __init__(self, options: Optional[Dict[str, str]] = None, **kwargs): - """ - The Feast Client should be initialized with at least one service url - Please see constants.py for configuration options. Commonly used options - or arguments include: - core_url: Feast Core URL. Used to manage features - serving_url: Feast Serving URL. Used to retrieve features - project: Sets the active project. This field is optional. 
- core_secure: Use client-side SSL/TLS for Core gRPC API - serving_secure: Use client-side SSL/TLS for Serving gRPC API - enable_auth: Enable authentication and authorization - auth_provider: Authentication provider – "google" or "oauth" - if auth_provider is "oauth", the following fields are mandatory – - oauth_grant_type, oauth_client_id, oauth_client_secret, oauth_audience, oauth_token_request_url - - Args: - options: Configuration options to initialize client with - **kwargs: Additional keyword arguments that will be used as - configuration options along with "options" - """ - - if options is None: - options = dict() - self._config = Config(options={**options, **kwargs}) - - self._core_service_stub: Optional[CoreServiceStub] = None - self._serving_service_stub: Optional[ServingServiceStub] = None - self._auth_metadata: Optional[grpc.AuthMetadataPlugin] = None - self._registry_impl: Optional[Registry] = None - - # Configure Auth Metadata Plugin if auth is enabled - if self._config.getboolean(opt.ENABLE_AUTH): - self._auth_metadata = feast_auth.get_auth_metadata_plugin(self._config) - - self._usage = Usage() - - @property - def config(self) -> Config: - return self._config - - @property - def _core_service(self): - """ - Creates or returns the gRPC Feast Core Service Stub - - Returns: CoreServiceStub - """ - if not self._core_service_stub: - channel = create_grpc_channel( - url=self._config.get(opt.CORE_URL), - enable_ssl=self._config.getboolean(opt.CORE_ENABLE_SSL), - enable_auth=self._config.getboolean(opt.ENABLE_AUTH), - ssl_server_cert_path=self._config.get(opt.CORE_SERVER_SSL_CERT), - auth_metadata_plugin=self._auth_metadata, - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - ) - self._core_service_stub = CoreServiceStub(channel) - return self._core_service_stub - - @property - def _use_object_store_registry(self) -> bool: - return self._config.exists(opt.REGISTRY_PATH) - - @property - def _registry(self): - if self._registry_impl is None: - self._registry_impl = Registry(self._config.get(opt.REGISTRY_PATH)) - return self._registry_impl - - @property - def _serving_service(self): - """ - Creates or returns the gRPC Feast Serving Service Stub. If both `opentracing` - and `grpcio-opentracing` are installed, an opentracing interceptor will be - instantiated based on the global tracer. 
- - Returns: ServingServiceStub - """ - if not self._serving_service_stub: - channel = create_grpc_channel( - url=self._config.get(opt.SERVING_URL), - enable_ssl=self._config.getboolean(opt.SERVING_ENABLE_SSL), - enable_auth=self._config.getboolean(opt.ENABLE_AUTH), - ssl_server_cert_path=self._config.get(opt.SERVING_SERVER_SSL_CERT), - auth_metadata_plugin=self._auth_metadata, - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - ) - try: - import opentracing - from grpc_opentracing import open_tracing_client_interceptor - from grpc_opentracing.grpcext import intercept_channel - - interceptor = open_tracing_client_interceptor( - opentracing.global_tracer() - ) - channel = intercept_channel(channel, interceptor) - except ImportError: - pass - self._serving_service_stub = ServingServiceStub(channel) - return self._serving_service_stub - - def _extra_grpc_params(self) -> Dict[str, Any]: - return dict( - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - - @property - def core_url(self) -> str: - """ - Retrieve Feast Core URL - - Returns: - Feast Core URL string - """ - return self._config.get(opt.CORE_URL) - - @core_url.setter - def core_url(self, value: str): - """ - Set the Feast Core URL - - Args: - value: Feast Core URL - """ - self._config.set(opt.CORE_URL, value) - - @property - def serving_url(self) -> str: - """ - Retrieve Feast Serving URL - - Returns: - Feast Serving URL string - """ - return self._config.get(opt.SERVING_URL) - - @serving_url.setter - def serving_url(self, value: str): - """ - Set the Feast Serving URL - - Args: - value: Feast Serving URL - """ - self._config.set(opt.SERVING_URL, value) - - @property - def job_service_url(self) -> str: - """ - Retrieve Feast Job Service URL - - Returns: - Feast Job Service URL string - """ - return self._config.get(opt.JOB_SERVICE_URL) - - @job_service_url.setter - def job_service_url(self, value: str): - """ - Set the Feast Job Service URL - - Args: - value: Feast Job Service URL - """ - self._config.set(opt.JOB_SERVICE_URL, value) - - @property - def core_secure(self) -> bool: - """ - Retrieve Feast Core client-side SSL/TLS setting - - Returns: - Whether client-side SSL/TLS is enabled - """ - return self._config.getboolean(opt.CORE_ENABLE_SSL) - - @core_secure.setter - def core_secure(self, value: bool): - """ - Set the Feast Core client-side SSL/TLS setting - - Args: - value: True to enable client-side SSL/TLS - """ - self._config.set(opt.CORE_ENABLE_SSL, value) - - @property - def serving_secure(self) -> bool: - """ - Retrieve Feast Serving client-side SSL/TLS setting - - Returns: - Whether client-side SSL/TLS is enabled - """ - return self._config.getboolean(opt.SERVING_ENABLE_SSL) - - @serving_secure.setter - def serving_secure(self, value: bool): - """ - Set the Feast Serving client-side SSL/TLS setting - - Args: - value: True to enable client-side SSL/TLS - """ - self._config.set(opt.SERVING_ENABLE_SSL, value) - - @property - def job_service_secure(self) -> bool: - """ - Retrieve Feast Job Service client-side SSL/TLS setting - - Returns: - Whether client-side SSL/TLS is enabled - """ - return self._config.getboolean(opt.JOB_SERVICE_ENABLE_SSL) - - @job_service_secure.setter - def job_service_secure(self, value: bool): - """ - Set the Feast Job Service client-side SSL/TLS setting - - Args: - value: True to enable client-side SSL/TLS - """ - self._config.set(opt.JOB_SERVICE_ENABLE_SSL, value) - - def version(self, sdk_only=False): - """ - Returns version information 
from Feast Core and Feast Serving - """ - import pkg_resources - - try: - sdk_version = pkg_resources.get_distribution("feast").version - except pkg_resources.DistributionNotFound: - sdk_version = "local build" - if sdk_only: - return sdk_version - - result = { - "sdk": {"version": sdk_version}, - "serving": "not configured", - "core": "not configured", - } - - if self.serving_url: - serving_version = self._serving_service.GetFeastServingInfo( - GetFeastServingInfoRequest(), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ).version - result["serving"] = {"url": self.serving_url, "version": serving_version} - - if not self._use_object_store_registry and self.core_url: - core_version = self._core_service.GetFeastCoreVersion( - GetFeastCoreVersionRequest(), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ).version - result["core"] = {"url": self.core_url, "version": core_version} - - return result - - @property - def project(self) -> str: - """ - Retrieve currently active project - - Returns: - Project name - """ - if not self._config.get(opt.PROJECT): - raise ValueError("No project has been configured.") - return self._config.get(opt.PROJECT) - - def set_project(self, project: Optional[str] = None): - """ - Set currently active Feast project - - Args: - project: Project to set as active. If unset, will reset to the default project. - """ - if project is None: - project = opt().PROJECT - self._config.set(opt.PROJECT, project) - - def list_projects(self) -> List[str]: - """ - List all active Feast projects - - Returns: - List of project names - - """ - - if self._use_object_store_registry: - raise NotImplementedError( - "Projects are not implemented for object store registry." - ) - else: - response = self._core_service.ListProjects( - ListProjectsRequest(), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - return list(response.projects) - - def create_project(self, project: str): - """ - Creates a Feast project - - Args: - project: Name of project - """ - - if self._use_object_store_registry: - raise NotImplementedError( - "Projects are not implemented for object store registry." - ) - else: - self._core_service.CreateProject( - CreateProjectRequest(name=project), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - - def archive_project(self, project): - """ - Archives a project. Project will still continue to function for - ingestion and retrieval, but will be in a read-only state. It will - also not be visible from the Core API for management purposes. - - Args: - project: Name of project to archive - """ - - if self._use_object_store_registry: - raise NotImplementedError( - "Projects are not implemented for object store registry." - ) - else: - try: - self._core_service.ArchiveProject( - ArchiveProjectRequest(name=project), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - - # revert to the default project - if self._project == project: - self._project = opt().PROJECT - - def apply( - self, - objects: Union[List[Union[Entity, FeatureTable]], Entity, FeatureTable], - project: str = None, - ): - """ - Idempotently registers entities and feature tables with Feast Core. Either a single - entity or feature table or a list can be provided. 
- - Args: - objects: List of entities and/or feature tables that will be registered - - Examples: - >>> from feast import Client - >>> from feast.entity import Entity - >>> from feast.value_type import ValueType - >>> - >>> feast_client = Client(core_url="localhost:6565") - >>> entity = Entity( - >>> name="driver_entity", - >>> description="Driver entity for car rides", - >>> value_type=ValueType.STRING, - >>> labels={ - >>> "key": "val" - >>> } - >>> ) - >>> feast_client.apply(entity) - """ - - self._usage.log("apply") - if project is None: - project = self.project - - if not isinstance(objects, list): - objects = [objects] - for obj in objects: - if isinstance(obj, Entity): - self._apply_entity(project, obj) # type: ignore - elif isinstance(obj, FeatureTable): - self._apply_feature_table(project, obj) # type: ignore - else: - raise ValueError( - f"Could not determine object type to apply {obj} with type {type(obj)}. Type must be Entity or FeatureTable." - ) - - def apply_entity(self, entities: Union[List[Entity], Entity], project: str = None): - """ - Deprecated. Please see apply(). - """ - warnings.warn( - "The method apply_entity() is being deprecated. Please use apply() instead. Feast 0.10 and onwards will not support apply_entity().", - DeprecationWarning, - ) - - if project is None: - project = self.project - - if not isinstance(entities, list): - entities = [entities] - for entity in entities: - if isinstance(entity, Entity): - self._apply_entity(project, entity) # type: ignore - continue - raise ValueError(f"Could not determine entity type to apply {entity}") - - def _apply_entity(self, project: str, entity: Entity): - """ - Registers a single entity with Feast - - Args: - entity: Entity that will be registered - """ - - if self._use_object_store_registry: - return self._registry.apply_entity(entity, project) - else: - entity.is_valid() - entity_proto = entity.to_spec_proto() - - # Convert the entity to a request and send to Feast Core - try: - apply_entity_response = self._core_service.ApplyEntity( - ApplyEntityRequest(project=project, spec=entity_proto), # type: ignore - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - - # Extract the returned entity - applied_entity = Entity.from_proto(apply_entity_response.entity) - - # Deep copy from the returned entity to the local entity - entity._update_from_entity(applied_entity) - - def list_entities( - self, project: str = None, labels: Dict[str, str] = dict() - ) -> List[Entity]: - """ - Retrieve a list of entities from Feast Core - - Args: - project: Filter entities based on project name - labels: User-defined labels that these entities are associated with - - Returns: - List of entities - """ - - if project is None: - project = self.project - - if self._use_object_store_registry: - return self._registry.list_entities(project) - else: - filter = ListEntitiesRequest.Filter(project=project, labels=labels) - - # Get latest entities from Feast Core - entity_protos = self._core_service.ListEntities( - ListEntitiesRequest(filter=filter), metadata=self._get_grpc_metadata(), - ) - - # Extract entities and return - entities = [] - for entity_proto in entity_protos.entities: - entity = Entity.from_proto(entity_proto) - entity._client = self - entities.append(entity) - return entities - - def get_entity(self, name: str, project: str = None) -> Entity: - """ - Retrieves an entity. 
- - Args: - project: Feast project that this entity belongs to - name: Name of entity - - Returns: - Returns either the specified entity, or raises an exception if - none is found - """ - - self._usage.log("get_entity") - - if project is None: - project = self.project - - if self._use_object_store_registry: - return self._registry.get_entity(name, project) - else: - try: - get_entity_response = self._core_service.GetEntity( - GetEntityRequest(project=project, name=name.strip()), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - entity = Entity.from_proto(get_entity_response.entity) - - return entity - - def apply_feature_table( - self, - feature_tables: Union[List[FeatureTable], FeatureTable], - project: str = None, - ): - """ - Deprecated. Please see apply(). - """ - warnings.warn( - "The method apply_feature_table() is being deprecated. Please use apply() instead. Feast 0.10 and onwards will not support apply_feature_table().", - DeprecationWarning, - ) - - if project is None: - project = self.project - - if not isinstance(feature_tables, list): - feature_tables = [feature_tables] - for feature_table in feature_tables: - if isinstance(feature_table, FeatureTable): - self._apply_feature_table(project, feature_table) # type: ignore - continue - raise ValueError( - f"Could not determine feature table type to apply {feature_table}" - ) - - def _apply_feature_table(self, project: str, feature_table: FeatureTable): - """ - Registers a single feature table with Feast - - Args: - feature_table: Feature table that will be registered - """ - - if self._use_object_store_registry: - return self._registry.apply_feature_table(feature_table, project) - else: - feature_table.is_valid() - feature_table_proto = feature_table.to_spec_proto() - - # Convert the feature table to a request and send to Feast Core - try: - apply_feature_table_response = self._core_service.ApplyFeatureTable( - ApplyFeatureTableRequest(project=project, table_spec=feature_table_proto), # type: ignore - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - - # Extract the returned feature table - applied_feature_table = FeatureTable.from_proto( - apply_feature_table_response.table - ) - - # Deep copy from the returned feature table to the local entity - feature_table._update_from_feature_table(applied_feature_table) - - def list_feature_tables( - self, project: str = None, labels: Dict[str, str] = dict() - ) -> List[FeatureTable]: - """ - Retrieve a list of feature tables from Feast Core - - Args: - project: Filter feature tables based on project name - - Returns: - List of feature tables - """ - - if project is None: - project = self.project - - if self._use_object_store_registry: - return self._registry.list_feature_tables(project) - else: - filter = ListFeatureTablesRequest.Filter(project=project, labels=labels) - - # Get latest feature tables from Feast Core - feature_table_protos = self._core_service.ListFeatureTables( - ListFeatureTablesRequest(filter=filter), - metadata=self._get_grpc_metadata(), - ) - - # Extract feature tables and return - feature_tables = [] - for feature_table_proto in feature_table_protos.tables: - feature_table = FeatureTable.from_proto(feature_table_proto) - feature_table._client = self - feature_tables.append(feature_table) - return feature_tables - - def get_feature_table(self, name: str, project: str = None) -> FeatureTable: - """ - 
Retrieves a feature table. - - Args: - project: Feast project that this feature table belongs to - name: Name of feature table - - Returns: - Returns either the specified feature table, or raises an exception if - none is found - """ - - self._usage.log("get_feature_table") - - if project is None: - project = self.project - - if self._use_object_store_registry: - return self._registry.get_feature_table(name, project) - else: - try: - get_feature_table_response = self._core_service.GetFeatureTable( - GetFeatureTableRequest(project=project, name=name.strip()), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - return FeatureTable.from_proto(get_feature_table_response.table) - - def delete_feature_table(self, name: str, project: str = None) -> None: - """ - Deletes a feature table. - - Args: - project: Feast project that this feature table belongs to - name: Name of feature table - """ - - if project is None: - project = self.project - - if self._use_object_store_registry: - return self._registry.delete_feature_table(name, project) - else: - try: - self._core_service.DeleteFeatureTable( - DeleteFeatureTableRequest(project=project, name=name.strip()), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - - def list_features_by_ref( - self, - project: str = None, - entities: List[str] = list(), - labels: Dict[str, str] = dict(), - ) -> Dict[FeatureRef, Feature]: - """ - Retrieve a dictionary of feature reference to feature from Feast Core based on filters provided. - - Args: - project: Feast project that these features belongs to - entities: Feast entity that these features are associated with - labels: Feast labels that these features are associated with - - Returns: - Dictionary of - - Examples: - >>> from feast import Client - >>> - >>> feast_client = Client(core_url="localhost:6565") - >>> features = feast_client.list_features(project="test_project", entities=["driver_id"], labels={"key1":"val1","key2":"val2"}) - >>> print(features) - """ - - if self._use_object_store_registry: - raise NotImplementedError( - "This function is not implemented for object store registry." - ) - else: - if project is None: - project = self.project - - filter = ListFeaturesRequest.Filter( - project=project, entities=entities, labels=labels - ) - - feature_protos = self._core_service.ListFeatures( - ListFeaturesRequest(filter=filter), metadata=self._get_grpc_metadata(), - ) - - # Extract features and return - features_dict = {} - for ref_str, feature_proto in feature_protos.features.items(): - feature_ref = FeatureRef.from_str(ref_str) - feature = Feature.from_proto(feature_proto) - features_dict[feature_ref] = feature - - return features_dict - - def ingest( - self, - feature_table: Union[str, FeatureTable], - source: Union[pd.DataFrame, str], - project: str = None, - chunk_size: int = 10000, - max_workers: int = max(CPU_COUNT - 1, 1), - timeout: int = int(opt().BATCH_INGESTION_PRODUCTION_TIMEOUT), - ) -> None: - """ - Batch load feature data into a FeatureTable. - - Args: - feature_table (typing.Union[str, feast.feature_table.FeatureTable]): - FeatureTable object or the string name of the feature table - - source (typing.Union[pd.DataFrame, str]): - Either a file path or Pandas Dataframe to ingest into Feast - Files that are currently supported: - * parquet - * csv - * json - - project: Feast project to locate FeatureTable - - chunk_size (int): - Amount of rows to load and ingest at a time. 
- - max_workers (int): - Number of worker processes to use to encode values. - - timeout (int): - Timeout in seconds to wait for completion. - - Examples: - >>> from feast import Client - >>> - >>> client = Client(core_url="localhost:6565") - >>> ft_df = pd.DataFrame( - >>> { - >>> "datetime": [pd.datetime.now()], - >>> "driver": [1001], - >>> "rating": [4.3], - >>> } - >>> ) - >>> client.set_project("project1") - >>> - >>> driver_ft = client.get_feature_table("driver") - >>> client.ingest(driver_ft, ft_df) - """ - - self._usage.log("ingest") - if project is None: - project = self.project - if isinstance(feature_table, str): - name = feature_table - if isinstance(feature_table, FeatureTable): - name = feature_table.name - - fetched_feature_table: Optional[FeatureTable] = self.get_feature_table( - name, project - ) - if fetched_feature_table is not None: - feature_table = fetched_feature_table - else: - raise Exception(f"FeatureTable, {name} cannot be found.") - - # Check 1) Only parquet file format for FeatureTable batch source is supported - if ( - feature_table.batch_source - and issubclass(type(feature_table.batch_source), FileSource) - and isinstance( - type(feature_table.batch_source.file_options.file_format), ParquetFormat - ) - ): - raise Exception( - f"No suitable batch source found for FeatureTable, {name}." - f"Only BATCH_FILE source with parquet format is supported for batch ingestion." - ) - - pyarrow_table, column_names = _read_table_from_source(source) - # Check 2) Check if FeatureTable batch source field mappings can be found in provided source table - _check_field_mappings( - column_names, - name, - feature_table.batch_source.event_timestamp_column, - feature_table.batch_source.field_mapping, - ) - - dir_path = None - with_partitions = False - if ( - issubclass(type(feature_table.batch_source), FileSource) - and feature_table.batch_source.date_partition_column - ): - with_partitions = True - dest_path = _write_partitioned_table_from_source( - column_names, - pyarrow_table, - feature_table.batch_source.date_partition_column, - feature_table.batch_source.event_timestamp_column, - ) - else: - dir_path, dest_path = _write_non_partitioned_table_from_source( - column_names, pyarrow_table, chunk_size, max_workers, - ) - - try: - if issubclass(type(feature_table.batch_source), FileSource): - file_url = feature_table.batch_source.file_options.file_url.rstrip("*") - _upload_to_file_source( - file_url, with_partitions, dest_path, self._config - ) - if issubclass(type(feature_table.batch_source), BigQuerySource): - bq_table_ref = feature_table.batch_source.bigquery_options.table_ref - feature_table_timestamp_column = ( - feature_table.batch_source.event_timestamp_column - ) - - _upload_to_bq_source( - bq_table_ref, feature_table_timestamp_column, dest_path - ) - finally: - # Remove parquet file(s) that were created earlier - print("Removing temporary file(s)...") - if dir_path: - shutil.rmtree(dir_path) - - print("Data has been successfully ingested into FeatureTable batch source.") - - def _get_grpc_metadata(self): - """ - Returns a metadata tuple to attach to gRPC requests. This is primarily - used when authentication is enabled but SSL/TLS is disabled. 
- - Returns: Tuple of metadata to attach to each gRPC call - """ - if self._config.getboolean(opt.ENABLE_AUTH) and self._auth_metadata: - return self._auth_metadata.get_signed_meta() - return () - - def get_online_features( - self, - feature_refs: List[str], - entity_rows: List[Dict[str, Any]], - project: Optional[str] = None, - ) -> OnlineResponse: - """ - Retrieves the latest online feature data from Feast Serving. - Args: - feature_refs: List of feature references that will be returned for each entity. - Each feature reference should have the following format: - "feature_table:feature" where "feature_table" & "feature" refer to - the feature and feature table names respectively. - Only the feature name is required. - entity_rows: A list of dictionaries where each key-value is an entity-name, entity-value pair. - project: Optionally specify the the project override. If specified, uses given project for retrieval. - Overrides the projects specified in Feature References if also are specified. - Returns: - GetOnlineFeaturesResponse containing the feature data in records. - Each EntityRow provided will yield one record, which contains - data fields with data value and field status metadata (if included). - Examples: - >>> from feast import Client - >>> - >>> feast_client = Client(core_url="localhost:6565", serving_url="localhost:6566") - >>> feature_refs = ["sales:daily_transactions"] - >>> entity_rows = [{"customer_id": 0},{"customer_id": 1}] - >>> - >>> online_response = feast_client.get_online_features( - >>> feature_refs, entity_rows, project="my_project") - >>> online_response_dict = online_response.to_dict() - >>> print(online_response_dict) - {'sales:daily_transactions': [1.1,1.2], 'sales:customer_id': [0,1]} - """ - - self._usage.log("get_online_features") - try: - response = self._serving_service.GetOnlineFeaturesV2( - GetOnlineFeaturesRequestV2( - features=_build_feature_references(feature_ref_strs=feature_refs), - entity_rows=_infer_online_entity_rows(entity_rows), - project=project if project is not None else self.project, - ), - timeout=self._config.getint(opt.GRPC_CONNECTION_TIMEOUT), - metadata=self._get_grpc_metadata(), - ) - except grpc.RpcError as e: - raise grpc.RpcError(e.details()) - - response = OnlineResponse(response) - return response diff --git a/sdk/python/feast/config.py b/sdk/python/feast/config.py deleted file mode 100644 index 9bc8cf4913..0000000000 --- a/sdk/python/feast/config.py +++ /dev/null @@ -1,222 +0,0 @@ -# -# Copyright 2019 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
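The INI-based feast.config.Config deleted here gave way to the declarative feature_store.yaml read through load_repo_config, as used by the CLI commands above. A minimal sketch of the replacement configuration, assuming a local provider and illustrative paths:

    from pathlib import Path

    from feast.repo_config import load_repo_config

    # feature_store.yaml supersedes ~/.feast/config; values are illustrative.
    Path("feature_store.yaml").write_text(
        "project: my_project\n"
        "registry: data/registry.db\n"
        "provider: local\n"
        "online_store:\n"
        "  type: sqlite\n"
    )

    repo_config = load_repo_config(Path("."))
    print(repo_config.project, repo_config.provider)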
-# -import logging -import os -from configparser import ConfigParser, NoOptionError -from os.path import expanduser, join -from typing import Dict, Optional - -from feast.constants import ( - CONFIG_FEAST_ENV_VAR_PREFIX, - CONFIG_FILE_DEFAULT_DIRECTORY, - CONFIG_FILE_NAME, - CONFIG_FILE_SECTION, - FEAST_CONFIG_FILE_ENV, -) -from feast.constants import ConfigOptions as opt - -_logger = logging.getLogger(__name__) -_UNSET = object() - - -def _init_config(path: str): - """ - Returns a ConfigParser that reads in a feast configuration file. If the - file does not exist it will be created. - - Args: - path: Optional path to initialize as Feast configuration - - Returns: ConfigParser of the Feast configuration file, with defaults - preloaded - - """ - # Create the configuration file directory if needed - config_dir = os.path.dirname(path) - config_dir = config_dir.rstrip("/") + "/" - - os.makedirs(os.path.dirname(config_dir), exist_ok=True) - - # Create the configuration file itself - config = ConfigParser(defaults=opt().defaults(), allow_no_value=True) - if os.path.exists(path): - config.read(path) - - # Store all configuration in a single section - if not config.has_section(CONFIG_FILE_SECTION): - config.add_section(CONFIG_FILE_SECTION) - - return config - - -def _get_feast_env_vars(): - """ - Get environmental variables that start with "FEAST_" - - Returns: Dict of Feast environmental variables (stripped of prefix) - """ - feast_env_vars = {} - for key in os.environ.keys(): - if key.upper().startswith(CONFIG_FEAST_ENV_VAR_PREFIX): - feast_env_vars[key[len(CONFIG_FEAST_ENV_VAR_PREFIX) :]] = os.environ[key] - return feast_env_vars - - -class Config: - """ - Maintains and provides access to Feast configuration - - Configuration is stored as key/value pairs. The user can specify options - through either input arguments to this class, environmental variables, or - by setting the config in a configuration file - - """ - - def __init__( - self, options: Optional[Dict[str, str]] = None, path: Optional[str] = None, - ): - """ - Configuration options are returned as follows (higher replaces lower) - 1. Initialized options ("options" argument) - 2. Environmental variables (reloaded on every "get") - 3. Configuration file options (loaded once) - 4. Default options (loaded once from memory) - - Args: - options: (optional) A list of initialized/hardcoded options. 
- path: (optional) File path to configuration file - """ - if not path: - path = join( - expanduser("~"), - os.environ.get(FEAST_CONFIG_FILE_ENV, CONFIG_FILE_DEFAULT_DIRECTORY,), - CONFIG_FILE_NAME, - ) - - config = _init_config(path) - - self._options = {} - if options and isinstance(options, dict): - self._options = options - - self._config = config - self._path = path - - def _get(self, option, default, get_method): - fallback = {} if default is _UNSET else {"fallback": default} - return get_method( - CONFIG_FILE_SECTION, - option, - vars={**_get_feast_env_vars(), **self._options}, - **fallback, - ) - - def get(self, option, default=_UNSET): - """ - Returns a single configuration option as a string - - Args: - option: Name of the option - default: Default value to return if option is not found - - Returns: String option that is returned - - """ - return self._get(option, default, self._config.get) - - def getboolean(self, option, default=_UNSET): - """ - Returns a single configuration option as a boolean - - Args: - option: Name of the option - default: Default value to return if option is not found - - Returns: Boolean option value that is returned - - """ - return self._get(option, default, self._config.getboolean) - - def getint(self, option, default=_UNSET): - """ - Returns a single configuration option as an integer - - Args: - option: Name of the option - default: Default value to return if option is not found - - Returns: Integer option value that is returned - - """ - return self._get(option, default, self._config.getint) - - def getfloat(self, option, default=_UNSET): - """ - Returns a single configuration option as an integer - - Args: - option: Name of the option - default: Default value to return if option is not found - - Returns: Float option value that is returned - - """ - return self._get(option, default, self._config.getfloat) - - def set(self, option, value): - """ - Sets a configuration option. Must be serializable to string - Args: - option: Option name to use as key - value: Value to store under option - """ - self._config.set(CONFIG_FILE_SECTION, option, value=str(value)) - - def exists(self, option): - """ - Tests whether a specific option is available - - Args: - option: Name of the option to check - - Returns: Boolean true/false whether the option is set - - """ - try: - self.get(option=option) - return True - except NoOptionError: - return False - - def save(self): - """ - Save the current configuration to disk. This does not include - environmental variables or initialized options - """ - defaults = self._config.defaults() - try: - self._config._defaults = {} - self._config.write(open(self._path, "w")) - finally: - self._config._defaults = defaults - - def __str__(self): - result = "" - for section_name in self._config.sections(): - result += "\n[" + section_name + "]\n" - for name, value in self._config.items(section_name): - result += name + " = " + value + "\n" - return result diff --git a/sdk/python/feast/constants.py b/sdk/python/feast/constants.py index 8bf6ea16ec..a2fe6f15c5 100644 --- a/sdk/python/feast/constants.py +++ b/sdk/python/feast/constants.py @@ -13,262 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
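The slimmed-down constants that follow configure the serverless feature server through environment variables instead of a config file. A sketch of how a deployment script might populate FEATURE_STORE_YAML_BASE64; the encoding step is an assumption inferred from the variable name, not code from this diff:

    import base64
    import os

    from feast.constants import FEATURE_STORE_YAML_ENV_NAME

    # Serialize the repo's feature_store.yaml so it can travel in an env var.
    with open("feature_store.yaml", "rb") as f:
        encoded = base64.b64encode(f.read()).decode("utf-8")

    # FEATURE_STORE_YAML_ENV_NAME resolves to "FEATURE_STORE_YAML_BASE64" below.
    os.environ[FEATURE_STORE_YAML_ENV_NAME] = encoded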
# -from enum import Enum -from typing import Optional - - -class AuthProvider(Enum): - GOOGLE = "google" - OAUTH = "oauth" - - -class Option: - def __init__(self, name, default): - self._name = name - self._default = default - - def __get__(self, instance, owner): - if instance is None: - return self._name.lower() - - return self._default - - -class ConfigMeta(type): - """ - Class factory which customizes ConfigOptions class instantiation. - Specifically, setting configuration option's name to lowercase of capitalized variable. - """ - - def __new__(cls, name, bases, attrs): - keys = [ - k for k, v in attrs.items() if not k.startswith("_") and not callable(v) - ] - attrs["__config_keys__"] = keys - attrs.update({k: Option(k, attrs[k]) for k in keys}) - return super().__new__(cls, name, bases, attrs) - - -#: Default datetime column name for point-in-time join -DATETIME_COLUMN: str = "datetime" - -#: Environmental variable to specify Feast configuration file location -FEAST_CONFIG_FILE_ENV: str = "FEAST_CONFIG" - -#: Default prefix to Feast environmental variables -CONFIG_FEAST_ENV_VAR_PREFIX: str = "FEAST_" - -#: Default directory to Feast configuration file -CONFIG_FILE_DEFAULT_DIRECTORY: str = ".feast" - -#: Default Feast configuration file name -CONFIG_FILE_NAME: str = "config" - -#: Default section in Feast configuration file to specify options -CONFIG_FILE_SECTION: str = "general" # Maximum interval(secs) to wait between retries for retry function MAX_WAIT_INTERVAL: str = "60" +AWS_LAMBDA_FEATURE_SERVER_IMAGE = "feastdev/feature-server-python-aws" +AWS_LAMBDA_FEATURE_SERVER_REPOSITORY = "feast-python-server" -class ConfigOptions(metaclass=ConfigMeta): - """ Feast Configuration Options """ - - #: Feast project namespace to use - PROJECT: str = "default" - - #: Default Feast Core URL - CORE_URL: str = "localhost:6565" - - #: Enable or disable TLS/SSL to Feast Core - CORE_ENABLE_SSL: str = "False" - - #: Enable user authentication to Feast Core - ENABLE_AUTH: str = "False" - - #: JWT Auth token for user authentication to Feast - AUTH_TOKEN: Optional[str] = None - - #: Path to certificate(s) to secure connection to Feast Core - CORE_SERVER_SSL_CERT: str = "" - - #: Default Feast Serving URL - SERVING_URL: str = "localhost:6566" - - #: Enable or disable TLS/SSL to Feast Serving - SERVING_ENABLE_SSL: str = "False" - - #: Path to certificate(s) to secure connection to Feast Serving - SERVING_SERVER_SSL_CERT: str = "" - - #: Default Feast Job Service URL - JOB_SERVICE_URL: Optional[str] = None - - #: Enable or disable TLS/SSL to Feast Job Service - JOB_SERVICE_ENABLE_SSL: str = "False" - - #: Path to certificate(s) to secure connection to Feast Job Service - JOB_SERVICE_SERVER_SSL_CERT: str = "" - - #: Enable or disable control loop for Feast Job Service - JOB_SERVICE_ENABLE_CONTROL_LOOP: str = "False" - - #: Default connection timeout to Feast Serving, Feast Core, and Feast Job Service (in seconds) - GRPC_CONNECTION_TIMEOUT: str = "10" - - #: Default gRPC connection timeout when sending an ApplyFeatureTable command to Feast Core (in seconds) - GRPC_CONNECTION_TIMEOUT_APPLY: str = "600" - - #: Default timeout when running batch ingestion - BATCH_INGESTION_PRODUCTION_TIMEOUT: str = "120" - - #: Time to wait for historical feature requests before timing out. 
- BATCH_FEATURE_REQUEST_WAIT_TIME_SECONDS: str = "600" - - #: Endpoint URL for S3 storage_client - S3_ENDPOINT_URL: Optional[str] = None - - #: Account name for Azure blob storage_client - AZURE_BLOB_ACCOUNT_NAME: Optional[str] = None - - #: Account access key for Azure blob storage_client - AZURE_BLOB_ACCOUNT_ACCESS_KEY: Optional[str] = None - - #: Authentication Provider - Google OpenID/OAuth - #: - #: Options: "google" / "oauth" - AUTH_PROVIDER: str = "google" - - #: Spark Job launcher. The choice of storage is connected to the choice of SPARK_LAUNCHER. - #: - #: Options: "standalone", "dataproc", "emr" - SPARK_LAUNCHER: Optional[str] = None - - #: Feast Spark Job ingestion jobs staging location. The choice of storage is connected to the choice of SPARK_LAUNCHER. - #: - #: Eg. gs://some-bucket/output/, s3://some-bucket/output/, file:///data/subfolder/ - SPARK_STAGING_LOCATION: Optional[str] = None - - #: Feast Spark Job ingestion jar file. The choice of storage is connected to the choice of SPARK_LAUNCHER. - #: - #: Eg. "dataproc" (http and gs), "emr" (http and s3), "standalone" (http and file) - SPARK_INGESTION_JAR: str = "https://storage.googleapis.com/feast-jobs/spark/ingestion/feast-ingestion-spark-develop.jar" - - #: Spark resource manager master url - SPARK_STANDALONE_MASTER: str = "local[*]" - - #: Directory where Spark is installed - SPARK_HOME: Optional[str] = None - - #: The project id where the materialized view of BigQuerySource is going to be created - #: by default, use the same project where view is located - SPARK_BQ_MATERIALIZATION_PROJECT: Optional[str] = None - - #: The dataset id where the materialized view of BigQuerySource is going to be created - #: by default, use the same dataset where view is located - SPARK_BQ_MATERIALIZATION_DATASET: Optional[str] = None - - #: Dataproc cluster to run Feast Spark Jobs in - DATAPROC_CLUSTER_NAME: Optional[str] = None - - #: Project of Dataproc cluster - DATAPROC_PROJECT: Optional[str] = None - - #: Region of Dataproc cluster - DATAPROC_REGION: Optional[str] = None - - #: No. of executor instances for Dataproc cluster - DATAPROC_EXECUTOR_INSTANCES = "2" - - #: No. of executor cores for Dataproc cluster - DATAPROC_EXECUTOR_CORES = "2" - - #: No. of executor memory for Dataproc cluster - DATAPROC_EXECUTOR_MEMORY = "2g" - - # namespace to use for Spark jobs launched using k8s spark operator - SPARK_K8S_NAMESPACE = "default" - - # expect k8s spark operator to be running in the same cluster as Feast - SPARK_K8S_USE_INCLUSTER_CONFIG = "True" - - # SparkApplication resource template - # Eg. '/home/jovyan/work/sparkapp-template.yaml' - SPARK_K8S_JOB_TEMPLATE_PATH = None - - #: File format of historical retrieval features - HISTORICAL_FEATURE_OUTPUT_FORMAT: str = "parquet" - - #: File location of historical retrieval features - HISTORICAL_FEATURE_OUTPUT_LOCATION: Optional[str] = None - - #: Default Redis host - REDIS_HOST: str = "localhost" - - #: Default Redis port - REDIS_PORT: str = "6379" - - #: Enable or disable TLS/SSL to Redis - REDIS_SSL: str = "False" - - #: Enable or disable StatsD - STATSD_ENABLED: str = "False" - - #: Default StatsD port - STATSD_HOST: Optional[str] = None - - #: Default StatsD port - STATSD_PORT: Optional[str] = None - - #: Ingestion Job DeadLetter Destination. The choice of storage is connected to the choice of SPARK_LAUNCHER. - #: - #: Eg. 
gs://some-bucket/output/, s3://some-bucket/output/, file:///data/subfolder/ - DEADLETTER_PATH: str = "" - - #: ProtoRegistry Address (currently only Stencil Server is supported as registry) - #: https://github.com/gojekfarm/stencil - STENCIL_URL: str = "" - - #: If set to true rows that do not pass custom validation (see feast.contrib.validation) - #: won't be saved to Online Storage - INGESTION_DROP_INVALID_ROWS = "False" - - #: EMR cluster to run Feast Spark Jobs in - EMR_CLUSTER_ID: Optional[str] = None - - #: Region of EMR cluster - EMR_REGION: Optional[str] = None - - #: Template path of EMR cluster - EMR_CLUSTER_TEMPLATE_PATH: Optional[str] = None - - #: Log path of EMR cluster - EMR_LOG_LOCATION: Optional[str] = None - - #: Oauth grant type - OAUTH_GRANT_TYPE: Optional[str] = None +# feature_store.yaml environment variable name for remote feature server +FEATURE_STORE_YAML_ENV_NAME: str = "FEATURE_STORE_YAML_BASE64" - #: Oauth client ID - OAUTH_CLIENT_ID: Optional[str] = None +# Environment variable for registry +REGISTRY_ENV_NAME: str = "REGISTRY_BASE64" - #: Oauth client secret - OAUTH_CLIENT_SECRET: Optional[str] = None +# Environment variable for toggling usage +FEAST_USAGE = "FEAST_USAGE" - #: Oauth intended recipients - OAUTH_AUDIENCE: Optional[str] = None +# Default value for FEAST_USAGE when environment variable is not set +DEFAULT_FEAST_USAGE_VALUE = "True" - #: Oauth token request url - OAUTH_TOKEN_REQUEST_URL: Optional[str] = None +# Environment variable for the path for overwriting universal test configs +FULL_REPO_CONFIGS_MODULE_ENV_NAME: str = "FULL_REPO_CONFIGS_MODULE" - #: Usage enabled - USAGE = "True" +# Environment variable for overwriting FTS port +FEATURE_TRANSFORMATION_SERVER_PORT_ENV_NAME: str = "FEATURE_TRANSFORMATION_SERVER_PORT" - #: Object store registry - REGISTRY_PATH: Optional[str] = None +# Default FTS port +DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT = 6569 - def defaults(self): - return { - k: getattr(self, k) - for k in self.__config_keys__ - if getattr(self, k) is not None - } +# Environment variable for feature server docker image tag +DOCKER_IMAGE_TAG_ENV_NAME: str = "FEAST_SERVER_DOCKER_IMAGE_TAG" diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index dcad135d8d..6a2b9a0d14 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -12,15 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
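The data_source.py rewrite that follows strips the property boilerplate from the options classes, leaving them as plain attribute holders. A minimal sketch of constructing KafkaOptions directly (broker, schema, and topic values are placeholders; AvroFormat is assumed to be one of the existing StreamFormat implementations):

    from feast.data_format import AvroFormat
    from feast.data_source import KafkaOptions

    # Plain attributes now replace the old property/setter pairs.
    options = KafkaOptions(
        bootstrap_servers="broker1:9092",  # placeholder address
        message_format=AvroFormat(
            schema_json='{"type": "record", "name": "DriverEvent", '
            '"fields": [{"name": "driver_id", "type": "long"}]}'
        ),
        topic="driver_events",  # placeholder topic
    )
    options.topic = "driver_events_v2"  # direct mutation, no setter needed
    proto = options.to_proto()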
- import enum +import warnings from abc import ABC, abstractmethod -from typing import Callable, Dict, Iterable, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union + +from google.protobuf.json_format import MessageToJson from feast import type_map from feast.data_format import StreamFormat +from feast.field import Field from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.repo_config import RepoConfig, get_data_source_class_from_type +from feast.types import VALUE_TYPES_TO_FEAST_TYPES from feast.value_type import ValueType @@ -34,6 +38,7 @@ class SourceType(enum.Enum): BATCH_BIGQUERY = 2 STREAM_KAFKA = 3 STREAM_KINESIS = 4 + BATCH_TRINO = 5 class KafkaOptions: @@ -44,51 +49,9 @@ class KafkaOptions: def __init__( self, bootstrap_servers: str, message_format: StreamFormat, topic: str, ): - self._bootstrap_servers = bootstrap_servers - self._message_format = message_format - self._topic = topic - - @property - def bootstrap_servers(self): - """ - Returns a comma-separated list of Kafka bootstrap servers - """ - return self._bootstrap_servers - - @bootstrap_servers.setter - def bootstrap_servers(self, bootstrap_servers): - """ - Sets a comma-separated list of Kafka bootstrap servers - """ - self._bootstrap_servers = bootstrap_servers - - @property - def message_format(self): - """ - Returns the data format that is used to encode the feature data in Kafka messages - """ - return self._message_format - - @message_format.setter - def message_format(self, message_format): - """ - Sets the data format that is used to encode the feature data in Kafka messages - """ - self._message_format = message_format - - @property - def topic(self): - """ - Returns the Kafka topic to collect feature data from - """ - return self._topic - - @topic.setter - def topic(self, topic): - """ - Sets the Kafka topic to collect feature data from - """ - self._topic = topic + self.bootstrap_servers = bootstrap_servers + self.message_format = message_format + self.topic = topic @classmethod def from_proto(cls, kafka_options_proto: DataSourceProto.KafkaOptions): @@ -135,51 +98,9 @@ class KinesisOptions: def __init__( self, record_format: StreamFormat, region: str, stream_name: str, ): - self._record_format = record_format - self._region = region - self._stream_name = stream_name - - @property - def record_format(self): - """ - Returns the data format used to encode the feature data in the Kinesis records. - """ - return self._record_format - - @record_format.setter - def record_format(self, record_format): - """ - Sets the data format used to encode the feature data in the Kinesis records. 
- """ - self._record_format = record_format - - @property - def region(self): - """ - Returns the AWS region of Kinesis stream - """ - return self._region - - @region.setter - def region(self, region): - """ - Sets the AWS region of Kinesis stream - """ - self._region = region - - @property - def stream_name(self): - """ - Returns the Kinesis stream name to obtain feature data from - """ - return self._stream_name - - @stream_name.setter - def stream_name(self, stream_name): - """ - Sets the Kinesis stream name to obtain feature data from - """ - self._stream_name = stream_name + self.record_format = record_format + self.region = region + self.stream_name = stream_name @classmethod def from_proto(cls, kinesis_options_proto: DataSourceProto.KinesisOptions): @@ -218,145 +139,182 @@ def to_proto(self) -> DataSourceProto.KinesisOptions: return kinesis_options_proto +_DATA_SOURCE_OPTIONS = { + DataSourceProto.SourceType.BATCH_FILE: "feast.infra.offline_stores.file_source.FileSource", + DataSourceProto.SourceType.BATCH_BIGQUERY: "feast.infra.offline_stores.bigquery_source.BigQuerySource", + DataSourceProto.SourceType.BATCH_REDSHIFT: "feast.infra.offline_stores.redshift_source.RedshiftSource", + DataSourceProto.SourceType.BATCH_SNOWFLAKE: "feast.infra.offline_stores.snowflake_source.SnowflakeSource", + DataSourceProto.SourceType.BATCH_TRINO: "feast.infra.offline_stores.contrib.trino_offline_store.trino_source.TrinoSource", + DataSourceProto.SourceType.BATCH_SPARK: "feast.infra.offline_stores.contrib.spark_offline_store.spark_source.SparkSource", + DataSourceProto.SourceType.STREAM_KAFKA: "feast.data_source.KafkaSource", + DataSourceProto.SourceType.STREAM_KINESIS: "feast.data_source.KinesisSource", + DataSourceProto.SourceType.REQUEST_SOURCE: "feast.data_source.RequestSource", + DataSourceProto.SourceType.PUSH_SOURCE: "feast.data_source.PushSource", +} + + class DataSource(ABC): """ - DataSource that can be used source features + DataSource that can be used to source features. + + Args: + name: Name of data source, which should be unique within a project + timestamp_field (optional): (Deprecated) Event timestamp column used for point in time + joins of feature values. + created_timestamp_column (optional): Timestamp column indicating when the row + was created, used for deduplicating rows. + field_mapping (optional): A dictionary mapping of column names in this data + source to feature names in a feature table or view. Only used for feature + columns, not entity or timestamp columns. + date_partition_column (optional): Timestamp column used for partitioning. + description (optional) A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the data source, typically the email of the primary + maintainer. + timestamp_field (optional): Event timestamp field used for point in time + joins of feature values. 
""" + name: str + timestamp_field: str + created_timestamp_column: str + field_mapping: Dict[str, str] + date_partition_column: str + description: str + tags: Dict[str, str] + owner: str + def __init__( self, - event_timestamp_column: Optional[str] = "", - created_timestamp_column: Optional[str] = "", + *, + event_timestamp_column: Optional[str] = None, + created_timestamp_column: Optional[str] = None, field_mapping: Optional[Dict[str, str]] = None, - date_partition_column: Optional[str] = "", + date_partition_column: Optional[str] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + name: Optional[str] = None, + timestamp_field: Optional[str] = None, ): - self._event_timestamp_column = event_timestamp_column - self._created_timestamp_column = created_timestamp_column - self._field_mapping = field_mapping if field_mapping else {} - self._date_partition_column = date_partition_column + """ + Creates a DataSource object. + Args: + name: Name of data source, which should be unique within a project + event_timestamp_column (optional): (Deprecated) Event timestamp column used for point in time + joins of feature values. + created_timestamp_column (optional): Timestamp column indicating when the row + was created, used for deduplicating rows. + field_mapping (optional): A dictionary mapping of column names in this data + source to feature names in a feature table or view. Only used for feature + columns, not entity or timestamp columns. + date_partition_column (optional): Timestamp column used for partitioning. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the data source, typically the email of the primary + maintainer. + timestamp_field (optional): Event timestamp field used for point + in time joins of feature values. + """ + if not name: + warnings.warn( + ( + "Names for data sources need to be supplied. " + "Data sources without names will not be supported after Feast 0.23." + ), + UserWarning, + ) + self.name = name or "" + if not timestamp_field and event_timestamp_column: + warnings.warn( + ( + "The argument 'event_timestamp_column' is being deprecated. Please use 'timestamp_field' instead. " + "instead. Feast 0.23 and onwards will not support the argument 'event_timestamp_column' for datasources." 
+ ), + DeprecationWarning, + ) + self.timestamp_field = timestamp_field or event_timestamp_column or "" + self.created_timestamp_column = ( + created_timestamp_column if created_timestamp_column else "" + ) + self.field_mapping = field_mapping if field_mapping else {} + self.date_partition_column = ( + date_partition_column if date_partition_column else "" + ) + self.description = description or "" + self.tags = tags or {} + self.owner = owner or "" + + def __hash__(self): + return hash((self.name, self.timestamp_field)) + + def __str__(self): + return str(MessageToJson(self.to_proto())) def __eq__(self, other): + if other is None: + return False + if not isinstance(other, DataSource): raise TypeError("Comparisons should only involve DataSource class objects.") if ( - self.event_timestamp_column != other.event_timestamp_column + self.name != other.name + or self.timestamp_field != other.timestamp_field or self.created_timestamp_column != other.created_timestamp_column or self.field_mapping != other.field_mapping or self.date_partition_column != other.date_partition_column + or self.description != other.description + or self.tags != other.tags + or self.owner != other.owner ): return False return True - @property - def field_mapping(self): - """ - Returns the field mapping of this data source - """ - return self._field_mapping - - @field_mapping.setter - def field_mapping(self, field_mapping): - """ - Sets the field mapping of this data source - """ - self._field_mapping = field_mapping - - @property - def event_timestamp_column(self): - """ - Returns the event timestamp column of this data source - """ - return self._event_timestamp_column - - @event_timestamp_column.setter - def event_timestamp_column(self, event_timestamp_column): - """ - Sets the event timestamp column of this data source - """ - self._event_timestamp_column = event_timestamp_column - - @property - def created_timestamp_column(self): - """ - Returns the created timestamp column of this data source - """ - return self._created_timestamp_column - - @created_timestamp_column.setter - def created_timestamp_column(self, created_timestamp_column): - """ - Sets the created timestamp column of this data source - """ - self._created_timestamp_column = created_timestamp_column - - @property - def date_partition_column(self): - """ - Returns the date partition column of this data source - """ - return self._date_partition_column - - @date_partition_column.setter - def date_partition_column(self, date_partition_column): - """ - Sets the date partition column of this data source - """ - self._date_partition_column = date_partition_column - @staticmethod @abstractmethod - def from_proto(data_source: DataSourceProto): - """ - Convert data source config in FeatureTable spec to a DataSource class object. + def from_proto(data_source: DataSourceProto) -> Any: """ + Converts data source config in protobuf spec to a DataSource class object. - if data_source.data_source_class_type: - cls = get_data_source_class_from_type(data_source.data_source_class_type) - return cls.from_proto(data_source) + Args: + data_source: A protobuf representation of a DataSource. - if data_source.file_options.file_format and data_source.file_options.file_url: - from feast.infra.offline_stores.file_source import FileSource + Returns: + A DataSource class object. 
- data_source_obj = FileSource.from_proto(data_source) - elif ( - data_source.bigquery_options.table_ref or data_source.bigquery_options.query + Raises: + ValueError: The type of DataSource could not be identified. + """ + data_source_type = data_source.type + if not data_source_type or ( + data_source_type + not in list(_DATA_SOURCE_OPTIONS.keys()) + + [DataSourceProto.SourceType.CUSTOM_SOURCE] ): - from feast.infra.offline_stores.bigquery_source import BigQuerySource - - data_source_obj = BigQuerySource.from_proto(data_source) - elif data_source.redshift_options.table or data_source.redshift_options.query: - from feast.infra.offline_stores.redshift_source import RedshiftSource + raise ValueError("Could not identify the source type being added.") - data_source_obj = RedshiftSource.from_proto(data_source) - elif ( - data_source.kafka_options.bootstrap_servers - and data_source.kafka_options.topic - and data_source.kafka_options.message_format - ): - data_source_obj = KafkaSource.from_proto(data_source) - elif ( - data_source.kinesis_options.record_format - and data_source.kinesis_options.region - and data_source.kinesis_options.stream_name - ): - data_source_obj = KinesisSource.from_proto(data_source) - else: - raise ValueError("Could not identify the source type being added") + if data_source_type == DataSourceProto.SourceType.CUSTOM_SOURCE: + cls = get_data_source_class_from_type(data_source.data_source_class_type) + return cls.from_proto(data_source) - return data_source_obj + cls = get_data_source_class_from_type(_DATA_SOURCE_OPTIONS[data_source_type]) + return cls.from_proto(data_source) @abstractmethod def to_proto(self) -> DataSourceProto: """ - Converts an DataSourceProto object to its protobuf representation. + Converts a DataSourceProto object to its protobuf representation. """ raise NotImplementedError def validate(self, config: RepoConfig): """ Validates the underlying data source. + + Args: + config: Configuration object used to configure a feature store. """ raise NotImplementedError @@ -364,7 +322,7 @@ def validate(self, config: RepoConfig): @abstractmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: """ - Get the callable method that returns Feast type given the raw column type + Returns the callable method that returns Feast type given the raw column type. """ raise NotImplementedError @@ -372,7 +330,16 @@ def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: """ - Get the list of column names and raw column types + Returns the list of column names and raw column types. + + Args: + config: Configuration object used to configure a feature store. + """ + raise NotImplementedError + + def get_table_query_string(self) -> str: + """ + Returns a string that can directly be used to reference this table in SQL. 
""" raise NotImplementedError @@ -388,24 +355,77 @@ def get_table_column_names_and_types( def __init__( self, - event_timestamp_column: str, - bootstrap_servers: str, - message_format: StreamFormat, - topic: str, + *args, + name: Optional[str] = None, + event_timestamp_column: Optional[str] = "", + bootstrap_servers: Optional[str] = None, + message_format: Optional[StreamFormat] = None, + topic: Optional[str] = None, created_timestamp_column: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, date_partition_column: Optional[str] = "", + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = "", + batch_source: Optional[DataSource] = None, ): + positional_attributes = [ + "name", + "event_timestamp_column", + "bootstrap_servers", + "message_format", + "topic", + ] + _name = name + _event_timestamp_column = event_timestamp_column + _bootstrap_servers = bootstrap_servers or "" + _message_format = message_format + _topic = topic or "" + + if args: + warnings.warn( + ( + "Kafka parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct Kafka sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"Kafka sources, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _event_timestamp_column = args[1] + if len(args) >= 3: + _bootstrap_servers = args[2] + if len(args) >= 4: + _message_format = args[3] + if len(args) >= 5: + _topic = args[4] + + if _message_format is None: + raise ValueError("Message format must be specified for Kafka source") + super().__init__( - event_timestamp_column, - created_timestamp_column, - field_mapping, - date_partition_column, + event_timestamp_column=_event_timestamp_column, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping, + date_partition_column=date_partition_column, + description=description, + tags=tags, + owner=owner, + name=_name, + timestamp_field=timestamp_field, ) - self._kafka_options = KafkaOptions( - bootstrap_servers=bootstrap_servers, - message_format=message_format, - topic=topic, + self.batch_source = batch_source + self.kafka_options = KafkaOptions( + bootstrap_servers=_bootstrap_servers, + message_format=_message_format, + topic=_topic, ) def __eq__(self, other): @@ -414,6 +434,9 @@ def __eq__(self, other): "Comparisons should only involve KafkaSource class objects." 
) + if not super().__eq__(other): + return False + if ( self.kafka_options.bootstrap_servers != other.kafka_options.bootstrap_servers @@ -424,51 +447,234 @@ def __eq__(self, other): return True - @property - def kafka_options(self): - """ - Returns the kafka options of this data source - """ - return self._kafka_options - - @kafka_options.setter - def kafka_options(self, kafka_options): - """ - Sets the kafka options of this data source - """ - self._kafka_options = kafka_options + def __hash__(self): + return super().__hash__() @staticmethod def from_proto(data_source: DataSourceProto): return KafkaSource( + name=data_source.name, + event_timestamp_column=data_source.timestamp_field, field_mapping=dict(data_source.field_mapping), bootstrap_servers=data_source.kafka_options.bootstrap_servers, message_format=StreamFormat.from_proto( data_source.kafka_options.message_format ), topic=data_source.kafka_options.topic, - event_timestamp_column=data_source.event_timestamp_column, created_timestamp_column=data_source.created_timestamp_column, + timestamp_field=data_source.timestamp_field, date_partition_column=data_source.date_partition_column, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + batch_source=DataSource.from_proto(data_source.batch_source) + if data_source.batch_source + else None, ) def to_proto(self) -> DataSourceProto: data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.STREAM_KAFKA, field_mapping=self.field_mapping, kafka_options=self.kafka_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, ) - data_source_proto.event_timestamp_column = self.event_timestamp_column + data_source_proto.timestamp_field = self.timestamp_field data_source_proto.created_timestamp_column = self.created_timestamp_column data_source_proto.date_partition_column = self.date_partition_column - + if self.batch_source: + data_source_proto.batch_source.MergeFrom(self.batch_source.to_proto()) return data_source_proto @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: return type_map.redshift_to_feast_value_type + def get_table_query_string(self) -> str: + raise NotImplementedError + + +class RequestSource(DataSource): + """ + RequestSource that can be used to provide input features for on demand transforms + + Attributes: + name: Name of the request data source + schema: Schema mapping from the input feature name to a ValueType + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the request data source, typically the email of the primary + maintainer. + """ + + name: str + schema: List[Field] + description: str + tags: Dict[str, str] + owner: str + + def __init__( + self, + *args, + name: Optional[str] = None, + schema: Optional[Union[Dict[str, ValueType], List[Field]]] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + ): + """Creates a RequestSource object.""" + positional_attributes = ["name", "schema"] + _name = name + _schema = schema + if args: + warnings.warn( + ( + "Request source parameters should be specified as a keyword argument instead of a positional arg." 
+ " Feast 0.23+ will not support positional arguments to construct request sources"
+ ),
+ DeprecationWarning,
+ )
+ if len(args) > len(positional_attributes):
+ raise ValueError(
+ f"Only {', '.join(positional_attributes)} are allowed as positional args when defining "
+ f"request sources, for backwards compatibility."
+ )
+ if len(args) >= 1:
+ _name = args[0]
+ if len(args) >= 2:
+ _schema = args[1]
+
+ super().__init__(name=_name, description=description, tags=tags, owner=owner)
+ if not _schema:
+ raise ValueError("Schema needs to be provided for Request Source")
+ if isinstance(_schema, Dict):
+ warnings.warn(
+ "Schema in RequestSource is changing type. The schema data type Dict[str, ValueType] is being deprecated in Feast 0.23. "
+ "Please use List[Field] instead for the schema",
+ DeprecationWarning,
+ )
+ schemaList = []
+ for key, valueType in _schema.items():
+ schemaList.append(
+ Field(name=key, dtype=VALUE_TYPES_TO_FEAST_TYPES[valueType])
+ )
+ self.schema = schemaList
+ elif isinstance(_schema, List):
+ self.schema = _schema
+ else:
+ raise Exception(
+ "Schema type must be either dictionary or list, not "
+ + str(type(_schema))
+ )
+
+ def validate(self, config: RepoConfig):
+ pass
+
+ def get_table_column_names_and_types(
+ self, config: RepoConfig
+ ) -> Iterable[Tuple[str, str]]:
+ pass
+
+ def __eq__(self, other):
+ if not isinstance(other, RequestSource):
+ raise TypeError(
+ "Comparisons should only involve RequestSource class objects."
+ )
+
+ if not super().__eq__(other):
+ return False
+
+ if isinstance(self.schema, List) and isinstance(other.schema, List):
+ for field1, field2 in zip(self.schema, other.schema):
+ if field1 != field2:
+ return False
+ return True
+ else:
+ return False
+
+ def __hash__(self):
+ return super().__hash__()
+
+ @staticmethod
+ def from_proto(data_source: DataSourceProto):
+
+ deprecated_schema = data_source.request_data_options.deprecated_schema
+ schema_pb = data_source.request_data_options.schema
+
+ if deprecated_schema and not schema_pb:
+ warnings.warn(
+ "Schema in RequestSource is changing type. The schema data type Dict[str, ValueType] is being deprecated in Feast 0.23. 
" + "Please use List[Field] instead for the schema", + DeprecationWarning, + ) + dict_schema = {} + for key, val in deprecated_schema.items(): + dict_schema[key] = ValueType(val) + return RequestSource( + name=data_source.name, + schema=dict_schema, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + ) + else: + list_schema = [] + for field_proto in schema_pb: + list_schema.append(Field.from_proto(field_proto)) + + return RequestSource( + name=data_source.name, + schema=list_schema, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + ) + + def to_proto(self) -> DataSourceProto: + + schema_pb = [] + + if isinstance(self.schema, Dict): + for key, value in self.schema.items(): + schema_pb.append( + Field( + name=key, dtype=VALUE_TYPES_TO_FEAST_TYPES[value.value] + ).to_proto() + ) + else: + for field in self.schema: + schema_pb.append(field.to_proto()) + data_source_proto = DataSourceProto( + name=self.name, + type=DataSourceProto.REQUEST_SOURCE, + description=self.description, + tags=self.tags, + owner=self.owner, + ) + data_source_proto.request_data_options.schema.extend(schema_pb) + + return data_source_proto + + def get_table_query_string(self) -> str: + raise NotImplementedError + + @staticmethod + def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: + raise NotImplementedError + + +class RequestDataSource(RequestSource): + def __init__(self, *args, **kwargs): + warnings.warn( + "The 'RequestDataSource' class is deprecated and was renamed to RequestSource. Please use RequestSource instead. This class name will be removed in Feast 0.23.", + DeprecationWarning, + ) + super().__init__(*args, **kwargs) + class KinesisSource(DataSource): def validate(self, config: RepoConfig): @@ -482,50 +688,117 @@ def get_table_column_names_and_types( @staticmethod def from_proto(data_source: DataSourceProto): return KinesisSource( + name=data_source.name, + event_timestamp_column=data_source.timestamp_field, field_mapping=dict(data_source.field_mapping), record_format=StreamFormat.from_proto( data_source.kinesis_options.record_format ), region=data_source.kinesis_options.region, stream_name=data_source.kinesis_options.stream_name, - event_timestamp_column=data_source.event_timestamp_column, created_timestamp_column=data_source.created_timestamp_column, + timestamp_field=data_source.timestamp_field, date_partition_column=data_source.date_partition_column, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + batch_source=DataSource.from_proto(data_source.batch_source) + if data_source.batch_source + else None, ) @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: pass + def get_table_query_string(self) -> str: + raise NotImplementedError + def __init__( self, - event_timestamp_column: str, - created_timestamp_column: str, - record_format: StreamFormat, - region: str, - stream_name: str, + *args, + name: Optional[str] = None, + event_timestamp_column: Optional[str] = "", + created_timestamp_column: Optional[str] = "", + record_format: Optional[StreamFormat] = None, + region: Optional[str] = "", + stream_name: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, date_partition_column: Optional[str] = "", + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = "", + batch_source: Optional[DataSource] = None, ): + 
positional_attributes = [ + "name", + "event_timestamp_column", + "created_timestamp_column", + "record_format", + "region", + "stream_name", + ] + _name = name + _event_timestamp_column = event_timestamp_column + _created_timestamp_column = created_timestamp_column + _record_format = record_format + _region = region or "" + _stream_name = stream_name or "" + if args: + warnings.warn( + ( + "Kinesis parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct kinesis sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"kinesis sources, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _event_timestamp_column = args[1] + if len(args) >= 3: + _created_timestamp_column = args[2] + if len(args) >= 4: + _record_format = args[3] + if len(args) >= 5: + _region = args[4] + if len(args) >= 6: + _stream_name = args[5] + + if _record_format is None: + raise ValueError("Record format must be specified for kinesis source") + super().__init__( - event_timestamp_column, - created_timestamp_column, - field_mapping, - date_partition_column, + name=_name, + event_timestamp_column=_event_timestamp_column, + created_timestamp_column=_created_timestamp_column, + field_mapping=field_mapping, + date_partition_column=date_partition_column, + description=description, + tags=tags, + owner=owner, + timestamp_field=timestamp_field, ) - self._kinesis_options = KinesisOptions( - record_format=record_format, region=region, stream_name=stream_name + self.batch_source = batch_source + self.kinesis_options = KinesisOptions( + record_format=_record_format, region=_region, stream_name=_stream_name ) def __eq__(self, other): - if other is None: - return False - if not isinstance(other, KinesisSource): raise TypeError( "Comparisons should only involve KinesisSource class objects." 
) + if not super().__eq__(other): + return False + if ( self.kinesis_options.record_format != other.kinesis_options.record_format or self.kinesis_options.region != other.kinesis_options.region @@ -535,29 +808,142 @@ def __eq__(self, other): return True - @property - def kinesis_options(self): - """ - Returns the kinesis options of this data source - """ - return self._kinesis_options - - @kinesis_options.setter - def kinesis_options(self, kinesis_options): - """ - Sets the kinesis options of this data source - """ - self._kinesis_options = kinesis_options + def __hash__(self): + return super().__hash__() def to_proto(self) -> DataSourceProto: data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.STREAM_KINESIS, field_mapping=self.field_mapping, kinesis_options=self.kinesis_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, ) - data_source_proto.event_timestamp_column = self.event_timestamp_column + data_source_proto.timestamp_field = self.timestamp_field data_source_proto.created_timestamp_column = self.created_timestamp_column data_source_proto.date_partition_column = self.date_partition_column + if self.batch_source: + data_source_proto.batch_source.MergeFrom(self.batch_source.to_proto()) + + return data_source_proto + + +class PushSource(DataSource): + """ + A source that can be used to ingest features on request + """ + + # TODO(adchia): consider adding schema here in case where Feast manages pushing events to the offline store + # TODO(adchia): consider a "mode" to support pushing raw vs transformed events + batch_source: DataSource + + def __init__( + self, + *args, + name: Optional[str] = None, + batch_source: Optional[DataSource] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + ): + """ + Creates a PushSource object. + Args: + name: Name of the push source + batch_source: The batch source that backs this push source. It's used when materializing from the offline + store to the online store, and when retrieving historical features. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the data source, typically the email of the primary + maintainer. + + """ + positional_attributes = ["name", "batch_source"] + _name = name + _batch_source = batch_source + if args: + warnings.warn( + ( + "Push source parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct push sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"push sources, for backwards compatibility." 
+ ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _batch_source = args[1] + + super().__init__(name=_name, description=description, tags=tags, owner=owner) + if not _batch_source: + raise ValueError( + f"batch_source parameter is needed for push source {self.name}" + ) + self.batch_source = _batch_source + + def __eq__(self, other): + if not isinstance(other, PushSource): + raise TypeError("Comparisons should only involve PushSource class objects.") + + if not super().__eq__(other): + return False + + if self.batch_source != other.batch_source: + return False + + return True + + def __hash__(self): + return super().__hash__() + + def validate(self, config: RepoConfig): + pass + + def get_table_column_names_and_types( + self, config: RepoConfig + ) -> Iterable[Tuple[str, str]]: + pass + + @staticmethod + def from_proto(data_source: DataSourceProto): + assert data_source.HasField("batch_source") + batch_source = DataSource.from_proto(data_source.batch_source) + + return PushSource( + name=data_source.name, + batch_source=batch_source, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + ) + + def to_proto(self) -> DataSourceProto: + batch_source_proto = None + if self.batch_source: + batch_source_proto = self.batch_source.to_proto() + + data_source_proto = DataSourceProto( + name=self.name, + type=DataSourceProto.PUSH_SOURCE, + description=self.description, + tags=self.tags, + owner=self.owner, + batch_source=batch_source_proto, + ) return data_source_proto + + def get_table_query_string(self) -> str: + raise NotImplementedError + + @staticmethod + def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: + raise NotImplementedError diff --git a/sdk/python/feast/protos/feast/core/__init__.py b/sdk/python/feast/diff/__init__.py similarity index 100% rename from sdk/python/feast/protos/feast/core/__init__.py rename to sdk/python/feast/diff/__init__.py diff --git a/sdk/python/feast/diff/infra_diff.py b/sdk/python/feast/diff/infra_diff.py new file mode 100644 index 0000000000..a09eaf39eb --- /dev/null +++ b/sdk/python/feast/diff/infra_diff.py @@ -0,0 +1,203 @@ +from dataclasses import dataclass +from typing import Generic, Iterable, List, Tuple, TypeVar + +from feast.diff.property_diff import PropertyDiff, TransitionType +from feast.infra.infra_object import ( + DATASTORE_INFRA_OBJECT_CLASS_TYPE, + DYNAMODB_INFRA_OBJECT_CLASS_TYPE, + SQLITE_INFRA_OBJECT_CLASS_TYPE, + InfraObject, +) +from feast.protos.feast.core.DatastoreTable_pb2 import ( + DatastoreTable as DatastoreTableProto, +) +from feast.protos.feast.core.DynamoDBTable_pb2 import ( + DynamoDBTable as DynamoDBTableProto, +) +from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto +from feast.protos.feast.core.SqliteTable_pb2 import SqliteTable as SqliteTableProto + +InfraObjectProto = TypeVar( + "InfraObjectProto", DatastoreTableProto, DynamoDBTableProto, SqliteTableProto +) + + +@dataclass +class InfraObjectDiff(Generic[InfraObjectProto]): + name: str + infra_object_type: str + current_infra_object: InfraObjectProto + new_infra_object: InfraObjectProto + infra_object_property_diffs: List[PropertyDiff] + transition_type: TransitionType + + +@dataclass +class InfraDiff: + infra_object_diffs: List[InfraObjectDiff] + + def __init__(self): + self.infra_object_diffs = [] + + def update(self): + """Apply the infrastructure changes specified in this object.""" + for infra_object_diff in self.infra_object_diffs: + if infra_object_diff.transition_type in [ 
+ TransitionType.DELETE, + TransitionType.UPDATE, + ]: + infra_object = InfraObject.from_proto( + infra_object_diff.current_infra_object + ) + infra_object.teardown() + elif infra_object_diff.transition_type in [ + TransitionType.CREATE, + TransitionType.UPDATE, + ]: + infra_object = InfraObject.from_proto( + infra_object_diff.new_infra_object + ) + infra_object.update() + + def to_string(self): + from colorama import Fore, Style + + log_string = "" + + message_action_map = { + TransitionType.CREATE: ("Created", Fore.GREEN), + TransitionType.DELETE: ("Deleted", Fore.RED), + TransitionType.UNCHANGED: ("Unchanged", Fore.LIGHTBLUE_EX), + TransitionType.UPDATE: ("Updated", Fore.YELLOW), + } + for infra_object_diff in self.infra_object_diffs: + if infra_object_diff.transition_type == TransitionType.UNCHANGED: + continue + action, color = message_action_map[infra_object_diff.transition_type] + log_string += f"{action} {infra_object_diff.infra_object_type} {Style.BRIGHT + color}{infra_object_diff.name}{Style.RESET_ALL}\n" + if infra_object_diff.transition_type == TransitionType.UPDATE: + for _p in infra_object_diff.infra_object_property_diffs: + log_string += f"\t{_p.property_name}: {Style.BRIGHT + color}{_p.val_existing}{Style.RESET_ALL} -> {Style.BRIGHT + Fore.LIGHTGREEN_EX}{_p.val_declared}{Style.RESET_ALL}\n" + + log_string = ( + f"{Style.BRIGHT + Fore.LIGHTBLUE_EX}No changes to infrastructure" + if not log_string + else log_string + ) + + return log_string + + +def tag_infra_proto_objects_for_keep_delete_add( + existing_objs: Iterable[InfraObjectProto], desired_objs: Iterable[InfraObjectProto] +) -> Tuple[ + Iterable[InfraObjectProto], Iterable[InfraObjectProto], Iterable[InfraObjectProto] +]: + existing_obj_names = {e.name for e in existing_objs} + desired_obj_names = {e.name for e in desired_objs} + + objs_to_add = [e for e in desired_objs if e.name not in existing_obj_names] + objs_to_keep = [e for e in desired_objs if e.name in existing_obj_names] + objs_to_delete = [e for e in existing_objs if e.name not in desired_obj_names] + + return objs_to_keep, objs_to_delete, objs_to_add + + +def diff_infra_protos( + current_infra_proto: InfraProto, new_infra_proto: InfraProto +) -> InfraDiff: + infra_diff = InfraDiff() + + infra_object_class_types_to_str = { + DATASTORE_INFRA_OBJECT_CLASS_TYPE: "datastore table", + DYNAMODB_INFRA_OBJECT_CLASS_TYPE: "dynamodb table", + SQLITE_INFRA_OBJECT_CLASS_TYPE: "sqlite table", + } + + for infra_object_class_type in infra_object_class_types_to_str: + current_infra_objects = get_infra_object_protos_by_type( + current_infra_proto, infra_object_class_type + ) + new_infra_objects = get_infra_object_protos_by_type( + new_infra_proto, infra_object_class_type + ) + ( + infra_objects_to_keep, + infra_objects_to_delete, + infra_objects_to_add, + ) = tag_infra_proto_objects_for_keep_delete_add( + current_infra_objects, new_infra_objects, + ) + + for e in infra_objects_to_add: + infra_diff.infra_object_diffs.append( + InfraObjectDiff( + e.name, + infra_object_class_types_to_str[infra_object_class_type], + None, + e, + [], + TransitionType.CREATE, + ) + ) + for e in infra_objects_to_delete: + infra_diff.infra_object_diffs.append( + InfraObjectDiff( + e.name, + infra_object_class_types_to_str[infra_object_class_type], + e, + None, + [], + TransitionType.DELETE, + ) + ) + for e in infra_objects_to_keep: + current_infra_object = [ + _e for _e in current_infra_objects if _e.name == e.name + ][0] + infra_diff.infra_object_diffs.append( + diff_between( + current_infra_object, 
+ e, + infra_object_class_types_to_str[infra_object_class_type], + ) + ) + + return infra_diff + + +def get_infra_object_protos_by_type( + infra_proto: InfraProto, infra_object_class_type: str +) -> List[InfraObjectProto]: + return [ + InfraObject.from_infra_object_proto(infra_object).to_proto() + for infra_object in infra_proto.infra_objects + if infra_object.infra_object_class_type == infra_object_class_type + ] + + +FIELDS_TO_IGNORE = {"project"} + + +def diff_between( + current: InfraObjectProto, new: InfraObjectProto, infra_object_type: str +) -> InfraObjectDiff: + assert current.DESCRIPTOR.full_name == new.DESCRIPTOR.full_name + property_diffs = [] + transition: TransitionType = TransitionType.UNCHANGED + if current != new: + for _field in current.DESCRIPTOR.fields: + if _field.name in FIELDS_TO_IGNORE: + continue + if getattr(current, _field.name) != getattr(new, _field.name): + transition = TransitionType.UPDATE + property_diffs.append( + PropertyDiff( + _field.name, + getattr(current, _field.name), + getattr(new, _field.name), + ) + ) + return InfraObjectDiff( + new.name, infra_object_type, current, new, property_diffs, transition, + ) diff --git a/sdk/python/feast/diff/property_diff.py b/sdk/python/feast/diff/property_diff.py new file mode 100644 index 0000000000..9136cada50 --- /dev/null +++ b/sdk/python/feast/diff/property_diff.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from enum import Enum + + +@dataclass +class PropertyDiff: + property_name: str + val_existing: str + val_declared: str + + +class TransitionType(Enum): + UNKNOWN = 0 + CREATE = 1 + DELETE = 2 + UPDATE = 3 + UNCHANGED = 4 diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py new file mode 100644 index 0000000000..b2caec2b68 --- /dev/null +++ b/sdk/python/feast/diff/registry_diff.py @@ -0,0 +1,333 @@ +from dataclasses import dataclass +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypeVar, cast + +from feast.base_feature_view import BaseFeatureView +from feast.data_source import DataSource +from feast.diff.property_diff import PropertyDiff, TransitionType +from feast.entity import Entity +from feast.feast_object import FeastObject, FeastObjectSpecProto +from feast.feature_service import FeatureService +from feast.feature_view import DUMMY_ENTITY_NAME +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto +from feast.protos.feast.core.FeatureService_pb2 import ( + FeatureService as FeatureServiceProto, +) +from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + OnDemandFeatureView as OnDemandFeatureViewProto, +) +from feast.protos.feast.core.RequestFeatureView_pb2 import ( + RequestFeatureView as RequestFeatureViewProto, +) +from feast.registry import FEAST_OBJECT_TYPES, FeastObjectType, Registry +from feast.repo_contents import RepoContents + + +@dataclass +class FeastObjectDiff: + name: str + feast_object_type: FeastObjectType + current_feast_object: Optional[FeastObject] + new_feast_object: Optional[FeastObject] + feast_object_property_diffs: List[PropertyDiff] + transition_type: TransitionType + + +@dataclass +class RegistryDiff: + feast_object_diffs: List[FeastObjectDiff] + + def __init__(self): + self.feast_object_diffs = [] + + def add_feast_object_diff(self, feast_object_diff: FeastObjectDiff): + self.feast_object_diffs.append(feast_object_diff) + 
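Both diff modules above follow the same two-step pattern: partition objects by name into keep/delete/add buckets, then walk the fields of each kept pair to collect `PropertyDiff` entries. The following is a minimal, self-contained sketch of that pattern; it uses a plain dataclass in place of the protobuf messages, and the `Table` type and its fields are illustrative only, not part of this patch.

```python
from dataclasses import dataclass, fields


@dataclass
class Table:
    # Stand-in for an infra/registry protobuf message; fields are illustrative.
    name: str
    region: str


def tag_for_keep_delete_add(existing, desired):
    # Mirrors tag_infra_proto_objects_for_keep_delete_add: bucket by name.
    existing_names = {e.name for e in existing}
    desired_names = {d.name for d in desired}
    to_add = [d for d in desired if d.name not in existing_names]
    to_keep = [d for d in desired if d.name in existing_names]
    to_delete = [e for e in existing if e.name not in desired_names]
    return to_keep, to_delete, to_add


def property_diffs(current, new):
    # Mirrors diff_between: compare field by field and record each change.
    return [
        (f.name, getattr(current, f.name), getattr(new, f.name))
        for f in fields(current)
        if getattr(current, f.name) != getattr(new, f.name)
    ]


existing = [Table("a", "us-east-1"), Table("b", "us-east-1")]
desired = [Table("b", "us-west-2"), Table("c", "us-east-1")]

keep, delete, add = tag_for_keep_delete_add(existing, desired)
print([t.name for t in delete], [t.name for t in add])  # ['a'] ['c']
print(property_diffs(existing[1], keep[0]))  # [('region', 'us-east-1', 'us-west-2')]
```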
+ def to_string(self): + from colorama import Fore, Style + + log_string = "" + + message_action_map = { + TransitionType.CREATE: ("Created", Fore.GREEN), + TransitionType.DELETE: ("Deleted", Fore.RED), + TransitionType.UNCHANGED: ("Unchanged", Fore.LIGHTBLUE_EX), + TransitionType.UPDATE: ("Updated", Fore.YELLOW), + } + for feast_object_diff in self.feast_object_diffs: + if feast_object_diff.name == DUMMY_ENTITY_NAME: + continue + if feast_object_diff.transition_type == TransitionType.UNCHANGED: + continue + if feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: + # TODO(adchia): Print statements out starting in Feast 0.21 + continue + action, color = message_action_map[feast_object_diff.transition_type] + log_string += f"{action} {feast_object_diff.feast_object_type.value} {Style.BRIGHT + color}{feast_object_diff.name}{Style.RESET_ALL}\n" + if feast_object_diff.transition_type == TransitionType.UPDATE: + for _p in feast_object_diff.feast_object_property_diffs: + log_string += f"\t{_p.property_name}: {Style.BRIGHT + color}{_p.val_existing}{Style.RESET_ALL} -> {Style.BRIGHT + Fore.LIGHTGREEN_EX}{_p.val_declared}{Style.RESET_ALL}\n" + + log_string = ( + f"{Style.BRIGHT + Fore.LIGHTBLUE_EX}No changes to registry" + if not log_string + else log_string + ) + + return log_string + + +def tag_objects_for_keep_delete_update_add( + existing_objs: Iterable[FeastObject], desired_objs: Iterable[FeastObject] +) -> Tuple[Set[FeastObject], Set[FeastObject], Set[FeastObject], Set[FeastObject]]: + # TODO(adchia): Remove the "if X.name" condition when data sources are forced to have names + existing_obj_names = {e.name for e in existing_objs if e.name} + desired_objs = [obj for obj in desired_objs if obj.name] + existing_objs = [obj for obj in existing_objs if obj.name] + desired_obj_names = {e.name for e in desired_objs if e.name} + + objs_to_add = {e for e in desired_objs if e.name not in existing_obj_names} + objs_to_update = {e for e in desired_objs if e.name in existing_obj_names} + objs_to_keep = {e for e in existing_objs if e.name in desired_obj_names} + objs_to_delete = {e for e in existing_objs if e.name not in desired_obj_names} + + return objs_to_keep, objs_to_delete, objs_to_update, objs_to_add + + +FeastObjectProto = TypeVar( + "FeastObjectProto", + DataSourceProto, + EntityProto, + FeatureViewProto, + FeatureServiceProto, + OnDemandFeatureViewProto, + RequestFeatureViewProto, +) + + +FIELDS_TO_IGNORE = {"project"} + + +def diff_registry_objects( + current: FeastObject, new: FeastObject, object_type: FeastObjectType +) -> FeastObjectDiff: + current_proto = current.to_proto() + new_proto = new.to_proto() + assert current_proto.DESCRIPTOR.full_name == new_proto.DESCRIPTOR.full_name + property_diffs = [] + transition: TransitionType = TransitionType.UNCHANGED + + current_spec: FeastObjectSpecProto + new_spec: FeastObjectSpecProto + if isinstance(current_proto, DataSourceProto) or isinstance( + new_proto, DataSourceProto + ): + assert type(current_proto) == type(new_proto) + current_spec = cast(DataSourceProto, current_proto) + new_spec = cast(DataSourceProto, new_proto) + else: + current_spec = current_proto.spec + new_spec = new_proto.spec + if current_spec != new_spec: + for _field in current_spec.DESCRIPTOR.fields: + if _field.name in FIELDS_TO_IGNORE: + continue + if getattr(current_spec, _field.name) != getattr(new_spec, _field.name): + transition = TransitionType.UPDATE + property_diffs.append( + PropertyDiff( + _field.name, + getattr(current_spec, _field.name), + 
getattr(new_spec, _field.name), + ) + ) + return FeastObjectDiff( + name=new_spec.name, + feast_object_type=object_type, + current_feast_object=current, + new_feast_object=new, + feast_object_property_diffs=property_diffs, + transition_type=transition, + ) + + +def extract_objects_for_keep_delete_update_add( + registry: Registry, current_project: str, desired_repo_contents: RepoContents, +) -> Tuple[ + Dict[FeastObjectType, Set[FeastObject]], + Dict[FeastObjectType, Set[FeastObject]], + Dict[FeastObjectType, Set[FeastObject]], + Dict[FeastObjectType, Set[FeastObject]], +]: + """ + Returns the objects in the registry that must be modified to achieve the desired repo state. + + Args: + registry: The registry storing the current repo state. + current_project: The Feast project whose objects should be compared. + desired_repo_contents: The desired repo state. + """ + objs_to_keep = {} + objs_to_delete = {} + objs_to_update = {} + objs_to_add = {} + + registry_object_type_to_objects: Dict[ + FeastObjectType, List[Any] + ] = FeastObjectType.get_objects_from_registry(registry, current_project) + registry_object_type_to_repo_contents: Dict[ + FeastObjectType, List[Any] + ] = FeastObjectType.get_objects_from_repo_contents(desired_repo_contents) + + for object_type in FEAST_OBJECT_TYPES: + ( + to_keep, + to_delete, + to_update, + to_add, + ) = tag_objects_for_keep_delete_update_add( + registry_object_type_to_objects[object_type], + registry_object_type_to_repo_contents[object_type], + ) + + objs_to_keep[object_type] = to_keep + objs_to_delete[object_type] = to_delete + objs_to_update[object_type] = to_update + objs_to_add[object_type] = to_add + + return objs_to_keep, objs_to_delete, objs_to_update, objs_to_add + + +def diff_between( + registry: Registry, current_project: str, desired_repo_contents: RepoContents, +) -> RegistryDiff: + """ + Returns the difference between the current and desired repo states. + + Args: + registry: The registry storing the current repo state. + current_project: The Feast project for which the diff is being computed. + desired_repo_contents: The desired repo state. + """ + diff = RegistryDiff() + + ( + objs_to_keep, + objs_to_delete, + objs_to_update, + objs_to_add, + ) = extract_objects_for_keep_delete_update_add( + registry, current_project, desired_repo_contents + ) + + for object_type in FEAST_OBJECT_TYPES: + objects_to_keep = objs_to_keep[object_type] + objects_to_delete = objs_to_delete[object_type] + objects_to_update = objs_to_update[object_type] + objects_to_add = objs_to_add[object_type] + + for e in objects_to_add: + diff.add_feast_object_diff( + FeastObjectDiff( + name=e.name, + feast_object_type=object_type, + current_feast_object=None, + new_feast_object=e, + feast_object_property_diffs=[], + transition_type=TransitionType.CREATE, + ) + ) + for e in objects_to_delete: + diff.add_feast_object_diff( + FeastObjectDiff( + name=e.name, + feast_object_type=object_type, + current_feast_object=e, + new_feast_object=None, + feast_object_property_diffs=[], + transition_type=TransitionType.DELETE, + ) + ) + for e in objects_to_update: + current_obj = [_e for _e in objects_to_keep if _e.name == e.name][0] + diff.add_feast_object_diff( + diff_registry_objects(current_obj, e, object_type) + ) + + return diff + + +def apply_diff_to_registry( + registry: Registry, registry_diff: RegistryDiff, project: str, commit: bool = True +): + """ + Applies the given diff to the given Feast project in the registry. + + Args: + registry: The registry to be updated. 
+ registry_diff: The diff to apply. + project: Feast project to be updated. + commit: Whether the change should be persisted immediately + """ + for feast_object_diff in registry_diff.feast_object_diffs: + # There is no need to delete the object on an update, since applying the new object + # will automatically delete the existing object. + if feast_object_diff.transition_type == TransitionType.DELETE: + if feast_object_diff.feast_object_type == FeastObjectType.ENTITY: + entity_obj = cast(Entity, feast_object_diff.current_feast_object) + registry.delete_entity(entity_obj.name, project, commit=False) + elif feast_object_diff.feast_object_type == FeastObjectType.FEATURE_SERVICE: + feature_service_obj = cast( + FeatureService, feast_object_diff.current_feast_object + ) + registry.delete_feature_service( + feature_service_obj.name, project, commit=False + ) + elif feast_object_diff.feast_object_type in [ + FeastObjectType.FEATURE_VIEW, + FeastObjectType.ON_DEMAND_FEATURE_VIEW, + FeastObjectType.REQUEST_FEATURE_VIEW, + ]: + feature_view_obj = cast( + BaseFeatureView, feast_object_diff.current_feast_object + ) + registry.delete_feature_view( + feature_view_obj.name, project, commit=False, + ) + + if feast_object_diff.transition_type in [ + TransitionType.CREATE, + TransitionType.UPDATE, + ]: + if feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: + registry.apply_data_source( + cast(DataSource, feast_object_diff.new_feast_object), + project, + commit=False, + ) + if feast_object_diff.feast_object_type == FeastObjectType.ENTITY: + registry.apply_entity( + cast(Entity, feast_object_diff.new_feast_object), + project, + commit=False, + ) + elif feast_object_diff.feast_object_type == FeastObjectType.FEATURE_SERVICE: + registry.apply_feature_service( + cast(FeatureService, feast_object_diff.new_feast_object), + project, + commit=False, + ) + elif feast_object_diff.feast_object_type in [ + FeastObjectType.FEATURE_VIEW, + FeastObjectType.ON_DEMAND_FEATURE_VIEW, + FeastObjectType.REQUEST_FEATURE_VIEW, + ]: + registry.apply_feature_view( + cast(BaseFeatureView, feast_object_diff.new_feast_object), + project, + commit=False, + ) + + if commit: + registry.commit() diff --git a/sdk/python/feast/protos/feast/serving/__init__.py b/sdk/python/feast/dqm/__init__.py similarity index 100% rename from sdk/python/feast/protos/feast/serving/__init__.py rename to sdk/python/feast/dqm/__init__.py diff --git a/sdk/python/feast/dqm/errors.py b/sdk/python/feast/dqm/errors.py new file mode 100644 index 0000000000..c4179f72b3 --- /dev/null +++ b/sdk/python/feast/dqm/errors.py @@ -0,0 +1,13 @@ +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .profilers.profiler import ValidationReport + + +class ValidationFailed(Exception): + def __init__(self, validation_report: "ValidationReport"): + self.validation_report = validation_report + + @property + def report(self) -> "ValidationReport": + return self.validation_report diff --git a/sdk/python/feast/protos/feast/storage/__init__.py b/sdk/python/feast/dqm/profilers/__init__.py similarity index 100% rename from sdk/python/feast/protos/feast/storage/__init__.py rename to sdk/python/feast/dqm/profilers/__init__.py diff --git a/sdk/python/feast/dqm/profilers/ge_profiler.py b/sdk/python/feast/dqm/profilers/ge_profiler.py new file mode 100644 index 0000000000..f1780754de --- /dev/null +++ b/sdk/python/feast/dqm/profilers/ge_profiler.py @@ -0,0 +1,162 @@ +import json +from typing import Any, Callable, Dict, List + +import dill +import great_expectations as ge 
+import numpy as np
+import pandas as pd
+from great_expectations.core import ExpectationSuite
+from great_expectations.dataset import PandasDataset
+from great_expectations.profile.base import ProfilerTypeMapping
+
+from feast.dqm.profilers.profiler import (
+ Profile,
+ Profiler,
+ ValidationError,
+ ValidationReport,
+)
+from feast.protos.feast.core.ValidationProfile_pb2 import (
+ GEValidationProfile as GEValidationProfileProto,
+)
+from feast.protos.feast.core.ValidationProfile_pb2 import (
+ GEValidationProfiler as GEValidationProfilerProto,
+)
+
+
+def _prepare_dataset(dataset: PandasDataset) -> PandasDataset:
+ dataset_copy = dataset.copy(deep=True)
+
+ for column in dataset.columns:
+ if dataset.expect_column_values_to_be_in_type_list(
+ column, type_list=sorted(list(ProfilerTypeMapping.DATETIME_TYPE_NAMES))
+ ).success:
+ # GE cannot parse Timestamp or other pandas datetime types
+ dataset_copy[column] = dataset[column].dt.strftime("%Y-%m-%dT%H:%M:%S")
+
+ if dataset[column].dtype == np.float32:
+ # GE converts expectation arguments into native Python float
+ # This could cause an error on comparison => so better to convert to double prematurely
+ dataset_copy[column] = dataset[column].astype(np.float64)
+
+ return dataset_copy
+
+
+class GEProfile(Profile):
+ """
+ GEProfile is an implementation of abstract Profile for integration with Great Expectations.
+ It executes validation by applying expectations from ExpectationSuite instance to a given dataset.
+ """
+
+ expectation_suite: ExpectationSuite
+
+ def __init__(self, expectation_suite: ExpectationSuite):
+ self.expectation_suite = expectation_suite
+
+ def validate(self, df: pd.DataFrame) -> "GEValidationReport":
+ """
+ Validate provided dataframe against GE expectation suite.
+ 1. Pandas dataframe is converted into PandasDataset (GE type)
+ 2. Some fixes applied to the data to avoid crashes inside GE (see _prepare_dataset)
+ 3. Each expectation from ExpectationSuite instance tested against resulting dataset
+
+ Return GEValidationReport, which parses Great Expectations' schema into a list of generic ValidationErrors.
+ """
+ dataset = PandasDataset(df)
+
+ dataset = _prepare_dataset(dataset)
+
+ results = ge.validate(
+ dataset, expectation_suite=self.expectation_suite, result_format="COMPLETE"
+ )
+ return GEValidationReport(results)
+
+ def to_proto(self):
+ return GEValidationProfileProto(
+ expectation_suite=json.dumps(self.expectation_suite.to_json_dict()).encode()
+ )
+
+ @classmethod
+ def from_proto(cls, proto: GEValidationProfileProto) -> "GEProfile":
+ return GEProfile(
+ expectation_suite=ExpectationSuite(**json.loads(proto.expectation_suite))
+ )
+
+ def __repr__(self):
+ expectations = json.dumps(
+ [e.to_json_dict() for e in self.expectation_suite.expectations], indent=2
+ )
+ return f"<GEProfile with expectations: {expectations}>"
+
+
+class GEProfiler(Profiler):
+ """
+ GEProfiler is an implementation of abstract Profiler for integration with Great Expectations.
+ It wraps around a user-defined profiler that should accept a dataset (in the form of a pandas dataframe)
+ and return an ExpectationSuite.
+ """
+
+ def __init__(
+ self, user_defined_profiler: Callable[[pd.DataFrame], ExpectationSuite]
+ ):
+ self.user_defined_profiler = user_defined_profiler
+
+ def analyze_dataset(self, df: pd.DataFrame) -> Profile:
+ """
+ Generate GEProfile with ExpectationSuite (set of expectations)
+ from a given pandas dataframe by applying the user-defined profiler.
+
+ Some fixes are also applied to the dataset (see _prepare_dataset function) to make it compatible with GE. 
+ + Return GEProfile + """ + dataset = PandasDataset(df) + + dataset = _prepare_dataset(dataset) + + return GEProfile(expectation_suite=self.user_defined_profiler(dataset)) + + def to_proto(self): + return GEValidationProfilerProto( + profiler=GEValidationProfilerProto.UserDefinedProfiler( + body=dill.dumps(self.user_defined_profiler, recurse=True) + ) + ) + + @classmethod + def from_proto(cls, proto: GEValidationProfilerProto) -> "GEProfiler": + return GEProfiler(user_defined_profiler=dill.loads(proto.profiler.body)) + + +class GEValidationReport(ValidationReport): + def __init__(self, validation_result: Dict[Any, Any]): + self._validation_result = validation_result + + @property + def is_success(self) -> bool: + return self._validation_result["success"] + + @property + def errors(self) -> List["ValidationError"]: + return [ + ValidationError( + check_name=res.expectation_config.expectation_type, + column_name=res.expectation_config.kwargs["column"], + check_config=res.expectation_config.kwargs, + missing_count=res["result"].get("missing_count"), + missing_percent=res["result"].get("missing_percent"), + ) + for res in self._validation_result["results"] + if not res["success"] + ] + + def __repr__(self): + failed_expectations = [ + res.to_json_dict() + for res in self._validation_result["results"] + if not res["success"] + ] + return json.dumps(failed_expectations, indent=2) + + +def ge_profiler(func): + return GEProfiler(user_defined_profiler=func) diff --git a/sdk/python/feast/dqm/profilers/profiler.py b/sdk/python/feast/dqm/profilers/profiler.py new file mode 100644 index 0000000000..5d2e9d36bc --- /dev/null +++ b/sdk/python/feast/dqm/profilers/profiler.py @@ -0,0 +1,88 @@ +import abc +from typing import Any, List, Optional + +import pandas as pd + + +class Profile: + @abc.abstractmethod + def validate(self, dataset: pd.DataFrame) -> "ValidationReport": + """ + Run set of rules / expectations from current profile against given dataset. + + Return ValidationReport + """ + ... + + @abc.abstractmethod + def to_proto(self): + ... + + @classmethod + @abc.abstractmethod + def from_proto(cls, proto) -> "Profile": + ... + + +class Profiler: + @abc.abstractmethod + def analyze_dataset(self, dataset: pd.DataFrame) -> Profile: + """ + Generate Profile object with dataset's characteristics (with rules / expectations) + from given dataset (as pandas dataframe). + """ + ... + + @abc.abstractmethod + def to_proto(self): + ... + + @classmethod + @abc.abstractmethod + def from_proto(cls, proto) -> "Profiler": + ... + + +class ValidationReport: + @property + @abc.abstractmethod + def is_success(self) -> bool: + """ + Return whether validation was successful + """ + ... + + @property + @abc.abstractmethod + def errors(self) -> List["ValidationError"]: + """ + Return list of ValidationErrors if validation failed (is_success = false) + """ + ... 
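To make the relationship between `Profiler`, `Profile`, and `ValidationReport` concrete, here is a rough usage sketch of the `ge_profiler` decorator defined above. It assumes `great_expectations` is installed; the dataframe and column name are invented for illustration, not part of this patch.

```python
import pandas as pd

from feast.dqm.profilers.ge_profiler import ge_profiler


@ge_profiler
def stats_profiler(ds):
    # ds is a great_expectations PandasDataset built from the reference dataframe.
    ds.expect_column_values_to_be_between(
        "avg_daily_trips", min_value=0, max_value=2000
    )
    return ds.get_expectation_suite()


# analyze_dataset applies the user-defined profiler to a reference dataframe
# and captures the resulting ExpectationSuite in a GEProfile.
reference = pd.DataFrame({"avg_daily_trips": [10, 250, 999]})
profile = stats_profiler.analyze_dataset(reference)

# validate replays the captured suite against a candidate dataframe.
candidate = pd.DataFrame({"avg_daily_trips": [5, 3000]})
report = profile.validate(candidate)
print(report.is_success)  # False: 3000 falls outside the expected range
```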
+
+
+class ValidationError:
+ check_name: str
+ column_name: str
+
+ check_config: Optional[Any]
+
+ missing_count: Optional[int]
+ missing_percent: Optional[float]
+
+ def __init__(
+ self,
+ check_name: str,
+ column_name: str,
+ check_config: Optional[Any] = None,
+ missing_count: Optional[int] = None,
+ missing_percent: Optional[float] = None,
+ ):
+ self.check_name = check_name
+ self.column_name = column_name
+ self.check_config = check_config
+ self.missing_count = missing_count
+ self.missing_percent = missing_percent
+
+ def __repr__(self):
+ return f"<ValidationError {self.check_name} on column {self.column_name}>" diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index ea0921bf04..117bfcbd9c 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -1,11 +1,14 @@
# This module generates dummy data to be used for tests and examples.
+import itertools
from enum import Enum
import numpy as np
import pandas as pd
from pytz import FixedOffset, timezone, utc
-from feast.infra.provider import DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL
+from feast.infra.offline_stores.offline_utils import (
+ DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL,
+)
class EventTimestampType(Enum):
@@ -27,61 +30,48 @@ def _convert_event_timestamp(event_timestamp: pd.Timestamp, t: EventTimestampTyp
def create_orders_df(
- customers,
- drivers,
- start_date,
- end_date,
- order_count,
- infer_event_timestamp_col=False,
+ customers, drivers, start_date, end_date, order_count, locations=None,
) -> pd.DataFrame:
"""
- Example df generated by this function:
+ Example df generated by this function (if locations):
- | order_id | driver_id | customer_id | order_is_success | event_timestamp |
- +----------+-----------+-------------+------------------+---------------------+
- | 100 | 5004 | 1007 | 0 | 2021-03-10 19:31:15 |
- | 101 | 5003 | 1006 | 0 | 2021-03-11 22:02:50 |
- | 102 | 5010 | 1005 | 0 | 2021-03-13 00:34:24 |
- | 103 | 5010 | 1001 | 1 | 2021-03-14 03:05:59 |
+ | order_id | driver_id | customer_id | origin_id | destination_id | order_is_success | event_timestamp |
+ +----------+-----------+-------------+-----------+----------------+------------------+---------------------+
+ | 100 | 5004 | 1007 | 1 | 18 | 0 | 2021-03-10 19:31:15 |
+ | 101 | 5003 | 1006 | 24 | 42 | 0 | 2021-03-11 22:02:50 |
+ | 102 | 5010 | 1005 | 19 | 12 | 0 | 2021-03-13 00:34:24 |
+ | 103 | 5010 | 1001 | 35 | 8 | 1 | 2021-03-14 03:05:59 |
"""
df = pd.DataFrame()
df["order_id"] = [order_id for order_id in range(100, 100 + order_count)]
df["driver_id"] = np.random.choice(drivers, order_count)
df["customer_id"] = np.random.choice(customers, order_count)
+ if locations:
+ location_pairs = np.array(list(itertools.permutations(locations, 2)))
+ locations_sample = location_pairs[
+ np.random.choice(len(location_pairs), order_count)
+ ].T
+ df["origin_id"] = locations_sample[0]
+ df["destination_id"] = locations_sample[1]
df["order_is_success"] = np.random.randint(0, 2, size=order_count).astype(np.int32)
-
- if infer_event_timestamp_col:
- df["e_ts"] = [
- _convert_event_timestamp(
- pd.Timestamp(dt, unit="ms", tz="UTC").round("ms"),
- EventTimestampType(3),
- )
- for idx, dt in enumerate(
- pd.date_range(start=start_date, end=end_date, periods=order_count)
- )
- ]
- df.sort_values(
- by=["e_ts", "order_id", "driver_id", "customer_id"], inplace=True,
+ df[DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL] = [
+ _convert_event_timestamp(
+ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms"),
+ EventTimestampType(idx % 4),
)
- else:
- 
df[DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL] = [ - _convert_event_timestamp( - pd.Timestamp(dt, unit="ms", tz="UTC").round("ms"), - EventTimestampType(idx % 4), - ) - for idx, dt in enumerate( - pd.date_range(start=start_date, end=end_date, periods=order_count) - ) - ] - df.sort_values( - by=[ - DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, - "order_id", - "driver_id", - "customer_id", - ], - inplace=True, + for idx, dt in enumerate( + pd.date_range(start=start_date, end=end_date, periods=order_count) ) + ] + df.sort_values( + by=[ + DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, + "order_id", + "driver_id", + "customer_id", + ], + inplace=True, + ) return df @@ -89,7 +79,7 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame """ Example df generated by this function: - | datetime | driver_id | conv_rate | acc_rate | avg_daily_trips | created | + | event_timestamp | driver_id | conv_rate | acc_rate | avg_daily_trips | created | |------------------+-----------+-----------+----------+-----------------+------------------| | 2021-03-17 19:31 | 5010 | 0.229297 | 0.685843 | 861 | 2021-03-24 19:34 | | 2021-03-17 20:31 | 5010 | 0.781655 | 0.861280 | 769 | 2021-03-24 19:34 | @@ -105,7 +95,7 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame """ df_hourly = pd.DataFrame( { - "datetime": [ + "event_timestamp": [ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") for dt in pd.date_range( start=start_date, end=end_date, freq="1H", closed="left" @@ -127,7 +117,7 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame df_all_drivers = pd.concat([df_hourly_copy, df_all_drivers]) df_all_drivers.reset_index(drop=True, inplace=True) - rows = df_all_drivers["datetime"].count() + rows = df_all_drivers["event_timestamp"].count() df_all_drivers["conv_rate"] = np.random.random(size=rows).astype(np.float32) df_all_drivers["acc_rate"] = np.random.random(size=rows).astype(np.float32) @@ -150,7 +140,7 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data """ Example df generated by this function: - | datetime | customer_id | current_balance | avg_passenger_count | lifetime_trip_count | created | + | event_timestamp | customer_id | current_balance | avg_passenger_count | lifetime_trip_count | created | |------------------+-------------+-----------------+---------------------+---------------------+------------------| | 2021-03-17 19:31 | 1010 | 0.889188 | 0.049057 | 412 | 2021-03-24 19:38 | | 2021-03-18 19:31 | 1010 | 0.979273 | 0.212630 | 639 | 2021-03-24 19:38 | @@ -166,7 +156,7 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data """ df_daily = pd.DataFrame( { - "datetime": [ + "event_timestamp": [ pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") for dt in pd.date_range( start=start_date, end=end_date, freq="1D", closed="left" @@ -183,7 +173,7 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data df_all_customers.reset_index(drop=True, inplace=True) - rows = df_all_customers["datetime"].count() + rows = df_all_customers["event_timestamp"].count() df_all_customers["current_balance"] = np.random.random(size=rows).astype(np.float32) df_all_customers["avg_passenger_count"] = np.random.random(size=rows).astype( @@ -196,3 +186,107 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data # TODO: Remove created timestamp in order to test whether its really optional df_all_customers["created"] = 
pd.to_datetime(pd.Timestamp.now(tz=None).round("ms")) return df_all_customers + + +def create_location_stats_df(locations, start_date, end_date) -> pd.DataFrame: + """ + Example df generated by this function: + + | event_timestamp | location_id | temperature | created | + +------------------+-------------+-------------+------------------+ + | 2021-03-17 19:31 | 1 | 74 | 2021-03-24 19:38 | + | 2021-03-17 20:31 | 24 | 63 | 2021-03-24 19:38 | + | 2021-03-17 21:31 | 19 | 65 | 2021-03-24 19:38 | + | 2021-03-17 22:31 | 35 | 86 | 2021-03-24 19:38 | + """ + df_hourly = pd.DataFrame( + { + "event_timestamp": [ + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") + for dt in pd.date_range( + start=start_date, end=end_date, freq="1H", closed="left" + ) + ] + } + ) + df_all_locations = pd.DataFrame() + + for location in locations: + df_hourly_copy = df_hourly.copy() + df_hourly_copy["location_id"] = location + df_all_locations = pd.concat([df_hourly_copy, df_all_locations]) + + df_all_locations.reset_index(drop=True, inplace=True) + rows = df_all_locations["event_timestamp"].count() + + df_all_locations["temperature"] = np.random.randint(50, 100, size=rows).astype( + np.int32 + ) + + # TODO: Remove created timestamp in order to test whether its really optional + df_all_locations["created"] = pd.to_datetime(pd.Timestamp.now(tz=None).round("ms")) + return df_all_locations + + +def create_global_daily_stats_df(start_date, end_date) -> pd.DataFrame: + """ + Example df generated by this function: + + | event_timestamp | num_rides | avg_ride_length | created | + |------------------+-------------+-----------------+------------------| + | 2021-03-17 19:00 | 99 | 0.889188 | 2021-03-24 19:38 | + | 2021-03-18 19:00 | 52 | 0.979273 | 2021-03-24 19:38 | + | 2021-03-19 19:00 | 66 | 0.976549 | 2021-03-24 19:38 | + | 2021-03-20 19:00 | 84 | 0.273697 | 2021-03-24 19:38 | + | 2021-03-21 19:00 | 89 | 0.438262 | 2021-03-24 19:38 | + | | ... | ... 
| | + | 2021-03-24 19:00 | 54 | 0.738860 | 2021-03-24 19:38 | + | 2021-03-25 19:00 | 58 | 0.848397 | 2021-03-24 19:38 | + | 2021-03-26 19:00 | 69 | 0.301552 | 2021-03-24 19:38 | + | 2021-03-27 19:00 | 63 | 0.943030 | 2021-03-24 19:38 | + | 2021-03-28 19:00 | 79 | 0.354919 | 2021-03-24 19:38 | + """ + df_daily = pd.DataFrame( + { + "event_timestamp": [ + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") + for dt in pd.date_range( + start=start_date, end=end_date, freq="1D", closed="left" + ) + ] + } + ) + rows = df_daily["event_timestamp"].count() + + df_daily["num_rides"] = np.random.randint(50, 100, size=rows).astype(np.int32) + df_daily["avg_ride_length"] = np.random.random(size=rows).astype(np.float32) + + # TODO: Remove created timestamp in order to test whether its really optional + df_daily["created"] = pd.to_datetime(pd.Timestamp.now(tz=None).round("ms")) + return df_daily + + +def create_field_mapping_df(start_date, end_date) -> pd.DataFrame: + """ + Example df generated by this function: + | event_timestamp | column_name | created | + |------------------+-------------+------------------| + | 2021-03-17 19:00 | 99 | 2021-03-24 19:38 | + | 2021-03-17 19:00 | 22 | 2021-03-24 19:38 | + | 2021-03-17 19:00 | 7 | 2021-03-24 19:38 | + | 2021-03-17 19:00 | 45 | 2021-03-24 19:38 | + """ + size = 10 + df = pd.DataFrame() + df["column_name"] = np.random.randint(1, 100, size=size).astype(np.int32) + df[DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL] = [ + _convert_event_timestamp( + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms"), + EventTimestampType(idx % 4), + ) + for idx, dt in enumerate( + pd.date_range(start=start_date, end=end_date, periods=size) + ) + ] + df["created"] = pd.to_datetime(pd.Timestamp.now(tz=None).round("ms")) + return df diff --git a/sdk/python/feast/protos/feast/types/__init__.py b/sdk/python/feast/embedded_go/lib/__init__.py similarity index 100% rename from sdk/python/feast/protos/feast/types/__init__.py rename to sdk/python/feast/embedded_go/lib/__init__.py diff --git a/sdk/python/feast/embedded_go/online_features_service.py b/sdk/python/feast/embedded_go/online_features_service.py new file mode 100644 index 0000000000..410af1d8fe --- /dev/null +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -0,0 +1,233 @@ +from functools import partial +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +import pyarrow as pa +from google.protobuf.timestamp_pb2 import Timestamp +from pyarrow.cffi import ffi + +from feast.errors import ( + FeatureNameCollisionError, + RequestDataNotFoundInEntityRowsException, +) +from feast.feature_service import FeatureService +from feast.online_response import OnlineResponse +from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesResponse +from feast.protos.feast.types import Value_pb2 +from feast.repo_config import RepoConfig +from feast.types import from_value_type +from feast.value_type import ValueType + +from .lib.embedded import DataTable, NewOnlineFeatureService, OnlineFeatureServiceConfig +from .lib.go import Slice_string +from .type_map import FEAST_TYPE_TO_ARROW_TYPE, arrow_array_to_array_of_proto + +if TYPE_CHECKING: + from feast.feature_store import FeatureStore + + +class EmbeddedOnlineFeatureServer: + def __init__( + self, repo_path: str, repo_config: RepoConfig, feature_store: "FeatureStore" + ): + # keep callback in self to prevent it from GC + self._transformation_callback = partial(transformation_callback, feature_store) + + self._service = NewOnlineFeatureService( + 
OnlineFeatureServiceConfig( + RepoPath=repo_path, RepoConfig=repo_config.json() + ), + self._transformation_callback, + ) + + def get_online_features( + self, + features_refs: List[str], + feature_service: Optional[FeatureService], + entities: Dict[str, Union[List[Any], Value_pb2.RepeatedValue]], + request_data: Dict[str, Union[List[Any], Value_pb2.RepeatedValue]], + full_feature_names: bool = False, + ): + + if feature_service: + join_keys_types = self._service.GetEntityTypesMapByFeatureService( + feature_service.name + ) + else: + join_keys_types = self._service.GetEntityTypesMap( + Slice_string(features_refs) + ) + + join_keys_types = { + join_key: ValueType(enum_value) for join_key, enum_value in join_keys_types + } + + # Here we create C structures that will be shared between Python and Go. + # We will pass entities as arrow Record Batch to Go part (in_c_array & in_c_schema) + # and receive features as Record Batch from Go (out_c_array & out_c_schema) + # This objects needs to be initialized here in order to correctly + # free them later using Python GC. + ( + entities_c_schema, + entities_ptr_schema, + entities_c_array, + entities_ptr_array, + ) = allocate_schema_and_array() + ( + req_data_c_schema, + req_data_ptr_schema, + req_data_c_array, + req_data_ptr_array, + ) = allocate_schema_and_array() + + ( + features_c_schema, + features_ptr_schema, + features_c_array, + features_ptr_array, + ) = allocate_schema_and_array() + + batch, schema = map_to_record_batch(entities, join_keys_types) + schema._export_to_c(entities_ptr_schema) + batch._export_to_c(entities_ptr_array) + + batch, schema = map_to_record_batch(request_data) + schema._export_to_c(req_data_ptr_schema) + batch._export_to_c(req_data_ptr_array) + + try: + self._service.GetOnlineFeatures( + featureRefs=Slice_string(features_refs), + featureServiceName=feature_service and feature_service.name or "", + entities=DataTable( + SchemaPtr=entities_ptr_schema, DataPtr=entities_ptr_array + ), + requestData=DataTable( + SchemaPtr=req_data_ptr_schema, DataPtr=req_data_ptr_array + ), + fullFeatureNames=full_feature_names, + output=DataTable( + SchemaPtr=features_ptr_schema, DataPtr=features_ptr_array + ), + ) + except RuntimeError as exc: + (msg,) = exc.args + if msg.startswith("featureNameCollisionError"): + feature_refs = msg[len("featureNameCollisionError: ") : msg.find(";")] + feature_refs = feature_refs.split(",") + raise FeatureNameCollisionError( + feature_refs_collisions=feature_refs, + full_feature_names=full_feature_names, + ) + + if msg.startswith("requestDataNotFoundInEntityRowsException"): + feature_refs = msg[len("requestDataNotFoundInEntityRowsException: ") :] + feature_refs = feature_refs.split(",") + raise RequestDataNotFoundInEntityRowsException(feature_refs) + + raise + + record_batch = pa.RecordBatch._import_from_c( + features_ptr_array, features_ptr_schema + ) + resp = record_batch_to_online_response(record_batch) + return OnlineResponse(resp) + + +def _to_arrow(value, type_hint: Optional[ValueType]) -> pa.Array: + if isinstance(value, Value_pb2.RepeatedValue): + _proto_to_arrow(value) + + if type_hint: + feast_type = from_value_type(type_hint) + if feast_type in FEAST_TYPE_TO_ARROW_TYPE: + return pa.array(value, FEAST_TYPE_TO_ARROW_TYPE[feast_type]) + + return pa.array(value) + + +def _proto_to_arrow(value: Value_pb2.RepeatedValue) -> pa.Array: + """ + ToDo: support entity rows already packed in protos + """ + raise NotImplementedError + + +def transformation_callback( + fs: "FeatureStore", + 
on_demand_feature_view_name: str, + input_arr_ptr: int, + input_schema_ptr: int, + output_arr_ptr: int, + output_schema_ptr: int, + full_feature_names: bool, +) -> int: + odfv = fs.get_on_demand_feature_view(on_demand_feature_view_name) + + input_record = pa.RecordBatch._import_from_c(input_arr_ptr, input_schema_ptr) + + output = odfv.get_transformed_features_df( + input_record.to_pandas(), full_feature_names=full_feature_names + ) + output_record = pa.RecordBatch.from_pandas(output) + + output_record.schema._export_to_c(output_schema_ptr) + output_record._export_to_c(output_arr_ptr) + + return output_record.num_rows + + +def allocate_schema_and_array(): + c_schema = ffi.new("struct ArrowSchema*") + ptr_schema = int(ffi.cast("uintptr_t", c_schema)) + + c_array = ffi.new("struct ArrowArray*") + ptr_array = int(ffi.cast("uintptr_t", c_array)) + return c_schema, ptr_schema, c_array, ptr_array + + +def map_to_record_batch( + map: Dict[str, Union[List[Any], Value_pb2.RepeatedValue]], + type_hint: Optional[Dict[str, ValueType]] = None, +) -> Tuple[pa.RecordBatch, pa.Schema]: + fields = [] + columns = [] + type_hint = type_hint or {} + + for name, values in map.items(): + arr = _to_arrow(values, type_hint.get(name)) + fields.append((name, arr.type)) + columns.append(arr) + + schema = pa.schema(fields) + batch = pa.RecordBatch.from_arrays(columns, schema=schema) + return batch, schema + + +def record_batch_to_online_response(record_batch): + resp = GetOnlineFeaturesResponse() + + for idx, field in enumerate(record_batch.schema): + if field.name.endswith("__timestamp") or field.name.endswith("__status"): + continue + + feature_vector = GetOnlineFeaturesResponse.FeatureVector( + statuses=record_batch.columns[idx + 1].to_pylist(), + event_timestamps=[ + Timestamp(seconds=seconds) + for seconds in record_batch.columns[idx + 2].to_pylist() + ], + ) + + if field.type == pa.null(): + feature_vector.values.extend( + [Value_pb2.Value()] * len(record_batch.columns[idx]) + ) + else: + feature_vector.values.extend( + arrow_array_to_array_of_proto(field.type, record_batch.columns[idx]) + ) + + resp.results.append(feature_vector) + resp.metadata.feature_names.val.append(field.name) + + return resp diff --git a/sdk/python/feast/embedded_go/type_map.py b/sdk/python/feast/embedded_go/type_map.py new file mode 100644 index 0000000000..e70dc3be86 --- /dev/null +++ b/sdk/python/feast/embedded_go/type_map.py @@ -0,0 +1,88 @@ +from typing import List + +import pyarrow as pa +import pytz + +from feast.protos.feast.types import Value_pb2 +from feast.types import Array, PrimitiveFeastType + +PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=pytz.UTC) + +ARROW_TYPE_TO_PROTO_FIELD = { + pa.int32(): "int32_val", + pa.int64(): "int64_val", + pa.float32(): "float_val", + pa.float64(): "double_val", + pa.bool_(): "bool_val", + pa.string(): "string_val", + pa.binary(): "bytes_val", + PA_TIMESTAMP_TYPE: "unix_timestamp_val", +} + +ARROW_LIST_TYPE_TO_PROTO_FIELD = { + pa.int32(): "int32_list_val", + pa.int64(): "int64_list_val", + pa.float32(): "float_list_val", + pa.float64(): "double_list_val", + pa.bool_(): "bool_list_val", + pa.string(): "string_list_val", + pa.binary(): "bytes_list_val", + PA_TIMESTAMP_TYPE: "unix_timestamp_list_val", +} + +ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS = { + pa.int32(): Value_pb2.Int32List, + pa.int64(): Value_pb2.Int64List, + pa.float32(): Value_pb2.FloatList, + pa.float64(): Value_pb2.DoubleList, + pa.bool_(): Value_pb2.BoolList, + pa.string(): Value_pb2.StringList, + pa.binary(): Value_pb2.BytesList, + 
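+    # Note: there is no dedicated timestamp-list proto type, so timestamp lists
+    # are cast to int64 (unix seconds) and shipped as an Int64List; see the cast
+    # in arrow_array_to_array_of_proto below.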
PA_TIMESTAMP_TYPE: Value_pb2.Int64List, +} + +FEAST_TYPE_TO_ARROW_TYPE = { + PrimitiveFeastType.INT32: pa.int32(), + PrimitiveFeastType.INT64: pa.int64(), + PrimitiveFeastType.FLOAT32: pa.float32(), + PrimitiveFeastType.FLOAT64: pa.float64(), + PrimitiveFeastType.STRING: pa.string(), + PrimitiveFeastType.BYTES: pa.binary(), + PrimitiveFeastType.BOOL: pa.bool_(), + PrimitiveFeastType.UNIX_TIMESTAMP: pa.timestamp("s"), + Array(PrimitiveFeastType.INT32): pa.list_(pa.int32()), + Array(PrimitiveFeastType.INT64): pa.list_(pa.int64()), + Array(PrimitiveFeastType.FLOAT32): pa.list_(pa.float32()), + Array(PrimitiveFeastType.FLOAT64): pa.list_(pa.float64()), + Array(PrimitiveFeastType.STRING): pa.list_(pa.string()), + Array(PrimitiveFeastType.BYTES): pa.list_(pa.binary()), + Array(PrimitiveFeastType.BOOL): pa.list_(pa.bool_()), + Array(PrimitiveFeastType.UNIX_TIMESTAMP): pa.list_(pa.timestamp("s")), +} + + +def arrow_array_to_array_of_proto( + arrow_type: pa.DataType, arrow_array: pa.Array +) -> List[Value_pb2.Value]: + values = [] + if isinstance(arrow_type, pa.ListType): + proto_list_class = ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS[arrow_type.value_type] + proto_field_name = ARROW_LIST_TYPE_TO_PROTO_FIELD[arrow_type.value_type] + + if arrow_type.value_type == PA_TIMESTAMP_TYPE: + arrow_array = arrow_array.cast(pa.list_(pa.int64())) + + for v in arrow_array.tolist(): + values.append( + Value_pb2.Value(**{proto_field_name: proto_list_class(val=v)}) + ) + else: + proto_field_name = ARROW_TYPE_TO_PROTO_FIELD[arrow_type] + + if arrow_type == PA_TIMESTAMP_TYPE: + arrow_array = arrow_array.cast(pa.int64()) + + for v in arrow_array.tolist(): + values.append(Value_pb2.Value(**{proto_field_name: v})) + + return values diff --git a/sdk/python/feast/entity.py b/sdk/python/feast/entity.py index 16740bdc17..2142900050 100644 --- a/sdk/python/feast/entity.py +++ b/sdk/python/feast/entity.py @@ -11,16 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import warnings +from datetime import datetime +from typing import Dict, List, Optional -from typing import Dict, MutableMapping, Optional +from google.protobuf.json_format import MessageToJson -import yaml -from google.protobuf import json_format -from google.protobuf.json_format import MessageToDict, MessageToJson -from google.protobuf.timestamp_pb2 import Timestamp - -from feast.loaders import yaml as feast_yaml -from feast.protos.feast.core.Entity_pb2 import Entity as EntityV2Proto +from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.Entity_pb2 import EntityMeta as EntityMetaProto from feast.protos.feast.core.Entity_pb2 import EntitySpecV2 as EntitySpecProto from feast.usage import log_exceptions @@ -29,45 +26,98 @@ class Entity: """ - Represents a collection of entities and associated metadata. + An entity defines a collection of entities for which features can be defined. An + entity can also contain associated metadata. + + Attributes: + name: The unique name of the entity. + value_type: The type of the entity, such as string or float. + join_key: A property that uniquely identifies different entities within the + collection. The join_key property is typically used for joining entities + with their associated features. If not specified, defaults to the name. + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. 
+        owner: The owner of the entity, typically the email of the primary maintainer.
+        created_timestamp: The time when the entity was created.
+        last_updated_timestamp: The time when the entity was last updated.
+        join_keys: A list of properties that uniquely identify different entities within the
+            collection. This is meant to replace the `join_key` parameter, but currently only
+            supports a list of size one.
+    """
+    name: str
+    value_type: ValueType
+    join_key: str
+    description: str
+    tags: Dict[str, str]
+    owner: str
+    created_timestamp: Optional[datetime]
+    last_updated_timestamp: Optional[datetime]
+    join_keys: List[str]
+
     @log_exceptions
     def __init__(
         self,
-        name: str,
+        *args,
+        name: Optional[str] = None,
         value_type: ValueType = ValueType.UNKNOWN,
         description: str = "",
         join_key: Optional[str] = None,
-        labels: Optional[MutableMapping[str, str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        owner: str = "",
+        join_keys: Optional[List[str]] = None,
    ):
-        self._name = name
-        self._description = description
-        self._value_type = value_type
-        if join_key:
-            self._join_key = join_key
-        else:
-            self._join_key = name
+        """Creates an Entity object."""
+        if len(args) == 1:
+            warnings.warn(
+                (
+                    "Entity name should be specified as a keyword argument instead of a positional arg. "
+                    "Feast 0.23+ will not support positional arguments to construct Entities."
+                ),
+                DeprecationWarning,
+            )
+        if len(args) > 1:
+            raise ValueError(
+                "All arguments to construct an entity should be specified as keyword arguments only"
+            )
+
+        self.name = args[0] if len(args) > 0 else name
-        self._labels: MutableMapping[str, str]
-        if labels is None:
-            self._labels = dict()
+        if not self.name:
+            raise ValueError("Name needs to be specified")
+
+        self.value_type = value_type
+
+        self.join_keys = join_keys or []
+        if join_keys and len(join_keys) > 1:
+            raise ValueError(
+                "An entity may only have a single join key. "
+                "Multiple join keys will be supported in the future."
+ ) + if join_keys and len(join_keys) == 1: + self.join_key = join_keys[0] else: - self._labels = labels + self.join_key = join_key if join_key else self.name + self.description = description + self.tags = tags if tags is not None else {} + self.owner = owner + self.created_timestamp = None + self.last_updated_timestamp = None - self._created_timestamp: Optional[Timestamp] = None - self._last_updated_timestamp: Optional[Timestamp] = None + def __hash__(self) -> int: + return hash((self.name, self.join_key)) def __eq__(self, other): if not isinstance(other, Entity): raise TypeError("Comparisons should only involve Entity class objects.") if ( - self.labels != other.labels - or self.name != other.name - or self.description != other.description + self.name != other.name or self.value_type != other.value_type or self.join_key != other.join_key + or self.description != other.description + or self.tags != other.tags + or self.owner != other.owner ): return False @@ -76,239 +126,68 @@ def __eq__(self, other): def __str__(self): return str(MessageToJson(self.to_proto())) - @property - def name(self): - """ - Returns the name of this entity - """ - return self._name - - @name.setter - def name(self, name): - """ - Sets the name of this entity - """ - self._name = name - - @property - def description(self): - """ - Returns the description of this entity - """ - return self._description - - @description.setter - def description(self, description): - """ - Sets the description of this entity - """ - self._description = description - - @property - def join_key(self): - """ - Returns the join key of this entity - """ - return self._join_key - - @join_key.setter - def join_key(self, join_key): - """ - Sets the join key of this entity - """ - self._join_key = join_key - - @property - def value_type(self) -> ValueType: - """ - Returns the type of this entity - """ - return self._value_type - - @value_type.setter - def value_type(self, value_type: ValueType): - """ - Set the type for this entity - """ - self._value_type = value_type - - @property - def labels(self): - """ - Returns the labels of this entity. This is the user defined metadata - defined as a dictionary. - """ - return self._labels - - @labels.setter - def labels(self, labels: MutableMapping[str, str]): - """ - Set the labels for this entity - """ - self._labels = labels - - @property - def created_timestamp(self): - """ - Returns the created_timestamp of this entity - """ - return self._created_timestamp - - @property - def last_updated_timestamp(self): - """ - Returns the last_updated_timestamp of this entity - """ - return self._last_updated_timestamp - def is_valid(self): """ - Validates the state of a entity locally. Raises an exception - if entity is invalid. - """ + Validates the state of this entity locally. + Raises: + ValueError: The entity does not have a name or does not have a type. 
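+
+        Example (illustrative):
+
+        >>> from feast import Entity, ValueType
+        >>> Entity(name="driver_id", value_type=ValueType.INT64).is_valid()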
+ """ if not self.name: - raise ValueError("No name found in entity.") + raise ValueError("The entity does not have a name.") if not self.value_type: - raise ValueError("No type found in entity {self.value_type}") - - @classmethod - def from_yaml(cls, yml: str): - """ - Creates an entity from a YAML string body or a file path - - Args: - yml: Either a file path containing a yaml file or a YAML string - - Returns: - Returns a EntityV2 object based on the YAML file - """ - - return cls.from_dict(feast_yaml.yaml_loader(yml, load_single=True)) - - @classmethod - def from_dict(cls, entity_dict): - """ - Creates an entity from a dict - - Args: - entity_dict: A dict representation of an entity - - Returns: - Returns a EntityV2 object based on the entity dict - """ - - entity_proto = json_format.ParseDict( - entity_dict, EntityV2Proto(), ignore_unknown_fields=True - ) - return cls.from_proto(entity_proto) + raise ValueError(f"The entity {self.name} does not have a type.") @classmethod - def from_proto(cls, entity_proto: EntityV2Proto): + def from_proto(cls, entity_proto: EntityProto): """ - Creates an entity from a protobuf representation of an entity + Creates an entity from a protobuf representation of an entity. Args: - entity_proto: A protobuf representation of an entity + entity_proto: A protobuf representation of an entity. Returns: - Returns a EntityV2 object based on the entity protobuf + An Entity object based on the entity protobuf. """ - entity = cls( name=entity_proto.spec.name, - description=entity_proto.spec.description, value_type=ValueType(entity_proto.spec.value_type), - labels=entity_proto.spec.labels, join_key=entity_proto.spec.join_key, + description=entity_proto.spec.description, + tags=entity_proto.spec.tags, + owner=entity_proto.spec.owner, ) - entity._created_timestamp = entity_proto.meta.created_timestamp - entity._last_updated_timestamp = entity_proto.meta.last_updated_timestamp + if entity_proto.meta.HasField("created_timestamp"): + entity.created_timestamp = entity_proto.meta.created_timestamp.ToDatetime() + if entity_proto.meta.HasField("last_updated_timestamp"): + entity.last_updated_timestamp = ( + entity_proto.meta.last_updated_timestamp.ToDatetime() + ) return entity - def to_proto(self) -> EntityV2Proto: + def to_proto(self) -> EntityProto: """ - Converts an entity object to its protobuf representation + Converts an entity object to its protobuf representation. Returns: - EntityV2Proto protobuf + An EntityProto protobuf. """ - - meta = EntityMetaProto( - created_timestamp=self.created_timestamp, - last_updated_timestamp=self.last_updated_timestamp, - ) + meta = EntityMetaProto() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) spec = EntitySpecProto( name=self.name, - description=self.description, value_type=self.value_type.value, - labels=self.labels, join_key=self.join_key, - ) - - return EntityV2Proto(spec=spec, meta=meta) - - def to_dict(self) -> Dict: - """ - Converts entity to dict - - Returns: - Dictionary object representation of entity - """ - - entity_dict = MessageToDict(self.to_proto()) - - # Remove meta when empty for more readable exports - if entity_dict["meta"] == {}: - del entity_dict["meta"] - - return entity_dict - - def to_yaml(self): - """ - Converts a entity to a YAML string. 
- - Returns: - Entity string returned in YAML format - """ - entity_dict = self.to_dict() - return yaml.dump(entity_dict, allow_unicode=True, sort_keys=False) - - def to_spec_proto(self) -> EntitySpecProto: - """ - Converts an EntityV2 object to its protobuf representation. - Used when passing EntitySpecV2 object to Feast request. - - Returns: - EntitySpecV2 protobuf - """ - - spec = EntitySpecProto( - name=self.name, description=self.description, - value_type=self.value_type.value, - labels=self.labels, - join_key=self.join_key, + tags=self.tags, + owner=self.owner, ) - return spec - - def _update_from_entity(self, entity): - """ - Deep replaces one entity with another - - Args: - entity: Entity to use as a source of configuration - """ - - self.name = entity.name - self.description = entity.description - self.value_type = entity.value_type - self.labels = entity.labels - self.join_key = entity.join_key - self._created_timestamp = entity.created_timestamp - self._last_updated_timestamp = entity.last_updated_timestamp + return EntityProto(spec=spec, meta=meta) diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 61fee8f918..e680337d98 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -1,4 +1,4 @@ -from typing import List, Set +from typing import Any, List, Set from colorama import Fore, Style @@ -10,6 +10,13 @@ def __init__(self, path): ) +class DataSourceNoNameException(Exception): + def __init__(self): + super().__init__( + "Unable to infer a name for this data source. Either table or name must be specified." + ) + + class FeastObjectNotFoundException(Exception): pass @@ -22,6 +29,16 @@ def __init__(self, name, project=None): super().__init__(f"Entity {name} does not exist") +class FeatureServiceNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + if project: + super().__init__( + f"Feature service {name} does not exist in project {project}" + ) + else: + super().__init__(f"Feature service {name} does not exist") + + class FeatureViewNotFoundException(FeastObjectNotFoundException): def __init__(self, name, project=None): if project: @@ -30,14 +47,36 @@ def __init__(self, name, project=None): super().__init__(f"Feature view {name} does not exist") -class FeatureTableNotFoundException(FeastObjectNotFoundException): +class OnDemandFeatureViewNotFoundException(FeastObjectNotFoundException): def __init__(self, name, project=None): if project: super().__init__( - f"Feature table {name} does not exist in project {project}" + f"On demand feature view {name} does not exist in project {project}" ) else: - super().__init__(f"Feature table {name} does not exist") + super().__init__(f"On demand feature view {name} does not exist") + + +class RequestDataNotFoundInEntityDfException(FeastObjectNotFoundException): + def __init__(self, feature_name, feature_view_name): + super().__init__( + f"Feature {feature_name} not found in the entity dataframe, but required by feature view {feature_view_name}" + ) + + +class RequestDataNotFoundInEntityRowsException(FeastObjectNotFoundException): + def __init__(self, feature_names): + super().__init__( + f"Required request data source features {feature_names} not found in the entity rows, but required by feature views" + ) + + +class DataSourceObjectNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + if project: + super().__init__(f"Data source {name} does not exist in project {project}") + else: + super().__init__(f"Data source {name} does 
not exist") class S3RegistryBucketNotExist(FeastObjectNotFoundException): @@ -50,6 +89,11 @@ def __init__(self, bucket): super().__init__(f"S3 bucket {bucket} for the Feast registry can't be accessed") +class SavedDatasetNotFound(FeastObjectNotFoundException): + def __init__(self, name: str, project: str): + super().__init__(f"Saved dataset {name} does not exist in project {project}") + + class FeastProviderLoginError(Exception): """Error class that indicates a user has not authenticated with their provider.""" @@ -59,15 +103,36 @@ def __init__(self, provider_name): super().__init__(f"Provider '{provider_name}' is not implemented") +class FeastProviderNotSetError(Exception): + def __init__(self): + super().__init__("Provider is not set, but is required") + + +class FeastFeatureServerTypeSetError(Exception): + def __init__(self, feature_server_type: str): + super().__init__( + f"Feature server type was set to {feature_server_type}, but the type should be determined by the provider" + ) + + +class FeastFeatureServerTypeInvalidError(Exception): + def __init__(self, feature_server_type: str): + super().__init__( + f"Feature server type was set to {feature_server_type}, but this type is invalid" + ) + + class FeastModuleImportError(Exception): - def __init__(self, module_name: str, module_type: str): - super().__init__(f"Could not import {module_type} module '{module_name}'") + def __init__(self, module_name: str, class_name: str): + super().__init__( + f"Could not import module '{module_name}' while attempting to load class '{class_name}'" + ) class FeastClassImportError(Exception): - def __init__(self, module_name, class_name, class_type="provider"): + def __init__(self, module_name: str, class_name: str): super().__init__( - f"Could not import {class_type} '{class_name}' from module '{module_name}'" + f"Could not import class '{class_name}' from module '{module_name}'" ) @@ -93,8 +158,9 @@ def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool) if full_feature_names: collisions = [ref.replace(":", "__") for ref in feature_refs_collisions] error_message = ( - "To resolve this collision, please ensure that the features in question " - "have different names." + "To resolve this collision, please ensure that the feature views or their own features " + "have different names. If you're intentionally joining the same feature view twice on " + "different sets of entities, please rename one of the feature views with '.with_name'." ) else: collisions = [ref.split(":")[1] for ref in feature_refs_collisions] @@ -109,6 +175,14 @@ def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool) ) +class SpecifiedFeaturesNotPresentError(Exception): + def __init__(self, specified_features: List[str], feature_view_name: str): + features = ", ".join(specified_features) + super().__init__( + f"Explicitly specified features {features} not found in inferred list of features for '{feature_view_name}'" + ) + + class FeastOnlineStoreInvalidName(Exception): def __init__(self, online_store_class_name: str): super().__init__( @@ -116,11 +190,10 @@ def __init__(self, online_store_class_name: str): ) -class FeastClassInvalidName(Exception): +class FeastInvalidBaseClass(Exception): def __init__(self, class_name: str, class_type: str): super().__init__( - f"Config Class '{class_name}' " - f"should end with the string `{class_type}`.'" + f"Class '{class_name}' should have `{class_type}` as a base class." 
        )
@@ -149,6 +222,14 @@ def __init__(
        )
 
 
+class DockerDaemonNotRunning(Exception):
+    def __init__(self):
+        super().__init__(
+            "The Docker Python SDK cannot connect to the Docker daemon. Please make sure you have "
+            "the Docker daemon installed, and that it is running."
+        )
+
+
 class RegistryInferenceFailure(Exception):
     def __init__(self, repo_obj_type: str, specific_issue: str):
         super().__init__(
@@ -175,3 +256,99 @@ def __init__(self):
 class RedshiftQueryError(Exception):
     def __init__(self, details):
         super().__init__(f"Redshift SQL Query failed to finish. Details: {details}")
+
+
+class RedshiftTableNameTooLong(Exception):
+    def __init__(self, table_name: str):
+        super().__init__(
+            f"Redshift table names have a maximum length of 127 characters, but the table name {table_name} has length {len(table_name)} characters."
+        )
+
+
+class SnowflakeCredentialsError(Exception):
+    def __init__(self):
+        super().__init__("Snowflake Connector failed due to incorrect credentials")
+
+
+class SnowflakeQueryError(Exception):
+    def __init__(self, details):
+        super().__init__(f"Snowflake SQL Query failed to finish. Details: {details}")
+
+
+class EntityTimestampInferenceException(Exception):
+    def __init__(self, expected_column_name: str):
+        super().__init__(
+            f"Please provide an entity_df with a column named {expected_column_name} representing the time of events."
+        )
+
+
+class InvalidEntityType(Exception):
+    def __init__(self, entity_type: type):
+        super().__init__(
+            f"The entity dataframe you have provided must be a Pandas DataFrame or a SQL query, "
+            f"but we found: {entity_type} "
+        )
+
+
+class ConflictingFeatureViewNames(Exception):
+    # TODO: print file location of conflicting feature views
+    def __init__(self, feature_view_name: str):
+        super().__init__(
+            f"The feature view name: {feature_view_name} refers to feature views of different types."
+        )
+
+
+class ExperimentalFeatureNotEnabled(Exception):
+    def __init__(self, feature_flag_name: str):
+        super().__init__(
+            f"You are attempting to use an experimental feature that is not enabled. Please run "
+            f"`feast alpha enable {feature_flag_name}` "
+        )
+
+
+class RepoConfigPathDoesNotExist(Exception):
+    def __init__(self):
+        super().__init__("The repo_path attribute does not exist for the repo_config.")
+
+
+class AwsLambdaDoesNotExist(Exception):
+    def __init__(self, resource_name: str):
+        super().__init__(
+            f"The AWS Lambda function {resource_name} should have been created properly, but does not exist."
+        )
+
+
+class AwsAPIGatewayDoesNotExist(Exception):
+    def __init__(self, resource_name: str):
+        super().__init__(
+            f"The AWS API Gateway {resource_name} should have been created properly, but does not exist."
+        )
+
+
+class IncompatibleRegistryStoreClass(Exception):
+    def __init__(self, actual_class: str, expected_class: str):
+        super().__init__(
+            f"The registry store class was expected to be {expected_class}, but was instead {actual_class}."
+ ) + + +class FeastInvalidInfraObjectType(Exception): + def __init__(self): + super().__init__("Could not identify the type of the InfraObject.") + + +class SnowflakeIncompleteConfig(Exception): + def __init__(self, e: KeyError): + super().__init__(f"{e} not defined in a config file or feature_store.yaml file") + + +class SnowflakeQueryUnknownError(Exception): + def __init__(self, query: str): + super().__init__(f"Snowflake query failed: {query}") + + +class InvalidFeaturesParameterType(Exception): + def __init__(self, features: Any): + super().__init__( + f"Invalid `features` parameter type {type(features)}. Expected one of List[str] and FeatureService." + ) diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py new file mode 100644 index 0000000000..4ffd693c44 --- /dev/null +++ b/sdk/python/feast/feast_object.py @@ -0,0 +1,33 @@ +from typing import Union + +from .data_source import DataSource +from .entity import Entity +from .feature_service import FeatureService +from .feature_view import FeatureView +from .on_demand_feature_view import OnDemandFeatureView +from .protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from .protos.feast.core.Entity_pb2 import EntitySpecV2 +from .protos.feast.core.FeatureService_pb2 import FeatureServiceSpec +from .protos.feast.core.FeatureView_pb2 import FeatureViewSpec +from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec +from .protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec +from .request_feature_view import RequestFeatureView + +# Convenience type representing all Feast objects +FeastObject = Union[ + FeatureView, + OnDemandFeatureView, + RequestFeatureView, + Entity, + FeatureService, + DataSource, +] + +FeastObjectSpecProto = Union[ + FeatureViewSpec, + OnDemandFeatureViewSpec, + RequestFeatureViewSpec, + EntitySpecV2, + FeatureServiceSpec, + DataSourceProto, +] diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py index 1eec6d6bd8..d1f96c302a 100644 --- a/sdk/python/feast/feature.py +++ b/sdk/python/feast/feature.py @@ -12,73 +12,86 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List, MutableMapping, Optional +from typing import Dict, Optional from feast.protos.feast.core.Feature_pb2 import FeatureSpecV2 as FeatureSpecProto -from feast.protos.feast.serving.ServingService_pb2 import ( - FeatureReferenceV2 as FeatureRefProto, -) -from feast.protos.feast.types import Value_pb2 as ValueTypeProto +from feast.protos.feast.types.Value_pb2 import ValueType as ValueTypeProto from feast.value_type import ValueType class Feature: - """Feature field type""" + """ + A Feature represents a class of serveable feature. + + Args: + name: Name of the feature. + dtype: The type of the feature, such as string or float. + labels (optional): User-defined metadata in dictionary form. 
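+
+    Example (illustrative):
+
+    >>> from feast import Feature, ValueType
+    >>> trips_today = Feature(name="trips_today", dtype=ValueType.INT64)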
+ """ def __init__( - self, - name: str, - dtype: ValueType, - labels: Optional[MutableMapping[str, str]] = None, + self, name: str, dtype: ValueType, labels: Optional[Dict[str, str]] = None, ): + """Creates a Feature object.""" self._name = name if not isinstance(dtype, ValueType): raise ValueError("dtype is not a valid ValueType") + if dtype is ValueType.UNKNOWN: + raise ValueError(f"dtype cannot be {dtype}") self._dtype = dtype if labels is None: - self._labels = dict() # type: MutableMapping + self._labels = dict() else: self._labels = labels def __eq__(self, other): - if ( - self.name != other.name - or self.dtype != other.dtype - or self.labels != other.labels - ): + if self.name != other.name or self.dtype != other.dtype: return False return True def __lt__(self, other): return self.name < other.name + def __repr__(self): + # return string representation of the reference + return f"{self.name}-{self.dtype}" + + def __str__(self): + # readable string of the reference + return f"Feature<{self.__repr__()}>" + @property def name(self): """ - Getter for name of this field + Gets the name of this feature. """ return self._name @property def dtype(self) -> ValueType: """ - Getter for data type of this field + Gets the data type of this feature. """ return self._dtype @property - def labels(self) -> MutableMapping[str, str]: + def labels(self) -> Dict[str, str]: """ - Getter for labels of this field + Gets the labels of this feature. """ return self._labels def to_proto(self) -> FeatureSpecProto: - """Converts Feature object to its Protocol Buffer representation""" - value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) + """ + Converts Feature object to its Protocol Buffer representation. + + Returns: + A FeatureSpecProto protobuf. + """ + value_type = ValueTypeProto.Enum.Value(self.dtype.name) return FeatureSpecProto( - name=self.name, value_type=value_type, labels=self.labels, + name=self.name, value_type=value_type, tags=self.labels, ) @classmethod @@ -90,90 +103,10 @@ def from_proto(cls, feature_proto: FeatureSpecProto): Returns: Feature object """ - feature = cls( name=feature_proto.name, dtype=ValueType(feature_proto.value_type), - labels=feature_proto.labels, + labels=dict(feature_proto.tags), ) return feature - - -class FeatureRef: - """ Feature Reference represents a reference to a specific feature. """ - - def __init__(self, name: str, feature_table: str): - self.proto = FeatureRefProto(name=name, feature_table=feature_table) - - @classmethod - def from_proto(cls, proto: FeatureRefProto): - """ - Construct a feature reference from the given FeatureReference proto - - Args: - proto: Protobuf FeatureReference to construct from - Returns: - FeatureRef that refers to the given feature - """ - return cls(name=proto.name, feature_table=proto.feature_table) - - @classmethod - def from_str(cls, feature_ref_str: str): - """ - Parse the given string feature reference into FeatureRef model - String feature reference should be in the format feature_table:feature. - Where "feature_table" and "name" are the feature_table name and feature name - respectively. 
- - Args: - feature_ref_str: String representation of the feature reference - Returns: - FeatureRef that refers to the given feature - """ - proto = FeatureRefProto() - - # parse feature table name if specified - if ":" in feature_ref_str: - proto.feature_table, proto.name = feature_ref_str.split(":") - else: - raise ValueError( - f"Unsupported feature reference: {feature_ref_str} - Feature reference string should be in the form [featuretable_name:featurename]" - ) - - return cls.from_proto(proto) - - def to_proto(self) -> FeatureRefProto: - """ - Convert and return this feature table reference to protobuf. - - Returns: - Protobuf respresentation of this feature table reference. - """ - - return self.proto - - def __repr__(self): - # return string representation of the reference - ref_str = self.proto.feature_table + ":" + self.proto.name - return ref_str - - def __str__(self): - # readable string of the reference - return f"FeatureRef<{self.__repr__()}>" - - -def _build_feature_references(feature_ref_strs: List[str]) -> List[FeatureRefProto]: - """ - Builds a list of FeatureReference protos from a list of FeatureReference strings - - Args: - feature_ref_strs: List of string feature references - Returns: - A list of FeatureReference protos parsed from args. - """ - - feature_refs = [FeatureRef.from_str(ref_str) for ref_str in feature_ref_strs] - feature_ref_protos = [ref.to_proto() for ref in feature_refs] - - return feature_ref_protos diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py new file mode 100644 index 0000000000..8347bed6da --- /dev/null +++ b/sdk/python/feast/feature_server.py @@ -0,0 +1,121 @@ +import json +import traceback +import warnings + +import pandas as pd +import uvicorn +from fastapi import FastAPI, HTTPException, Request +from fastapi.logger import logger +from fastapi.params import Depends +from google.protobuf.json_format import MessageToDict, Parse +from pydantic import BaseModel + +import feast +from feast import proto_json +from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest + + +# TODO: deprecate this in favor of push features +class WriteToFeatureStoreRequest(BaseModel): + feature_view_name: str + df: dict + allow_registry_cache: bool = True + + +class PushFeaturesRequest(BaseModel): + push_source_name: str + df: dict + allow_registry_cache: bool = True + + +def get_app(store: "feast.FeatureStore"): + proto_json.patch() + + app = FastAPI() + + async def get_body(request: Request): + return await request.body() + + @app.post("/get-online-features") + def get_online_features(body=Depends(get_body)): + try: + # Validate and parse the request data into GetOnlineFeaturesRequest Protobuf object + request_proto = GetOnlineFeaturesRequest() + Parse(body, request_proto) + + # Initialize parameters for FeatureStore.get_online_features(...) 
call + if request_proto.HasField("feature_service"): + features = store.get_feature_service( + request_proto.feature_service, allow_cache=True + ) + else: + features = list(request_proto.features.val) + + full_feature_names = request_proto.full_feature_names + + batch_sizes = [len(v.val) for v in request_proto.entities.values()] + num_entities = batch_sizes[0] + if any(batch_size != num_entities for batch_size in batch_sizes): + raise HTTPException(status_code=500, detail="Uneven number of columns") + + response_proto = store._get_online_features( + features=features, + entity_values=request_proto.entities, + full_feature_names=full_feature_names, + native_entity_values=False, + ).proto + + # Convert the Protobuf object to JSON and return it + return MessageToDict( # type: ignore + response_proto, preserving_proto_field_name=True, float_precision=18 + ) + except Exception as e: + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + # Raise HTTPException to return the error message to the client + raise HTTPException(status_code=500, detail=str(e)) + + @app.post("/push") + def push(body=Depends(get_body)): + try: + request = PushFeaturesRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + store.push( + push_source_name=request.push_source_name, + df=df, + allow_registry_cache=request.allow_registry_cache, + ) + except Exception as e: + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + # Raise HTTPException to return the error message to the client + raise HTTPException(status_code=500, detail=str(e)) + + @app.post("/write-to-online-store") + def write_to_online_store(body=Depends(get_body)): + warnings.warn( + "write_to_online_store is deprecated. Please consider using /push instead", + RuntimeWarning, + ) + try: + request = WriteToFeatureStoreRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + store.write_to_online_store( + feature_view_name=request.feature_view_name, + df=df, + allow_registry_cache=request.allow_registry_cache, + ) + except Exception as e: + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + # Raise HTTPException to return the error message to the client + raise HTTPException(status_code=500, detail=str(e)) + + return app + + +def start_server( + store: "feast.FeatureStore", host: str, port: int, no_access_log: bool +): + app = get_app(store) + uvicorn.run(app, host=host, port=port, access_log=(not no_access_log)) diff --git a/sdk/python/feast/feature_service.py b/sdk/python/feast/feature_service.py index aaff436c28..492d31a809 100644 --- a/sdk/python/feast/feature_service.py +++ b/sdk/python/feast/feature_service.py @@ -1,50 +1,163 @@ +import warnings from datetime import datetime -from typing import List, Optional, Union +from typing import Dict, List, Optional, Union -from feast.feature_table import FeatureTable +from google.protobuf.json_format import MessageToJson + +from feast.base_feature_view import BaseFeatureView from feast.feature_view import FeatureView from feast.feature_view_projection import FeatureViewProjection +from feast.on_demand_feature_view import OnDemandFeatureView from feast.protos.feast.core.FeatureService_pb2 import ( FeatureService as FeatureServiceProto, ) from feast.protos.feast.core.FeatureService_pb2 import ( - FeatureServiceMeta, - FeatureServiceSpec, + FeatureServiceMeta as FeatureServiceMetaProto, +) +from feast.protos.feast.core.FeatureService_pb2 import ( + FeatureServiceSpec as 
FeatureServiceSpecProto,
 )
+from feast.usage import log_exceptions
 
 
 class FeatureService:
+    """
+    A feature service defines a logical group of features from one or more feature views.
+    This group of features can be retrieved together during training or serving.
+
+    Attributes:
+        name: The unique name of the feature service.
+        feature_view_projections: A list containing feature views and feature view
+            projections, representing the features in the feature service.
+        description: A human-readable description.
+        tags: A dictionary of key-value pairs to store arbitrary metadata.
+        owner: The owner of the feature service, typically the email of the primary
+            maintainer.
+        created_timestamp: The time when the feature service was created.
+        last_updated_timestamp: The time when the feature service was last updated.
+    """
+
     name: str
-    features: List[FeatureViewProjection]
+    feature_view_projections: List[FeatureViewProjection]
+    description: str
+    tags: Dict[str, str]
+    owner: str
     created_timestamp: Optional[datetime] = None
     last_updated_timestamp: Optional[datetime] = None
 
+    @log_exceptions
     def __init__(
         self,
-        name: str,
-        features: List[Union[FeatureTable, FeatureView, FeatureViewProjection]],
+        *args,
+        name: Optional[str] = None,
+        features: Optional[List[Union[FeatureView, OnDemandFeatureView]]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        description: str = "",
+        owner: str = "",
     ):
-        self.name = name
-        self.features = []
-        for feature in features:
-            if isinstance(feature, FeatureTable) or isinstance(feature, FeatureView):
-                self.features.append(FeatureViewProjection.from_definition(feature))
-            elif isinstance(feature, FeatureViewProjection):
-                self.features.append(feature)
+        """
+        Creates a FeatureService object.
+
+        Raises:
+            ValueError: If one of the specified features is not a valid type.
+        """
+        positional_attributes = ["name", "features"]
+        _name = name
+        _features = features
+        if args:
+            warnings.warn(
+                (
+                    "Feature service parameters should be specified as keyword arguments instead of positional args. "
+                    "Feast 0.23+ will not support positional arguments to construct feature services."
+                ),
+                DeprecationWarning,
+            )
+            if len(args) > len(positional_attributes):
+                raise ValueError(
+                    f"Only {', '.join(positional_attributes)} are allowed as positional args when defining "
+                    f"feature service, for backwards compatibility."
+                )
+            if len(args) >= 1:
+                _name = args[0]
+            if len(args) >= 2:
+                _features = args[1]
+
+        if not _name:
+            raise ValueError("Feature service name needs to be specified")
+
+        if not _features:
+            # Technically, it was legal to create a feature service with no feature views before.
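+            # (Illustrative: a typical definition looks like
+            #  FeatureService(name="model_v1", features=[driver_stats_fv]),
+            #  where driver_stats_fv is a FeatureView defined elsewhere; an empty
+            #  features list is still accepted here.)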
+            _features = []
+
+        self.name = _name
+        self.feature_view_projections = []
+
+        for feature_grouping in _features:
+            if isinstance(feature_grouping, BaseFeatureView):
+                self.feature_view_projections.append(feature_grouping.projection)
             else:
-                raise ValueError(f"Unexpected type: {type(feature)}")
+                raise ValueError(
+                    f"The feature service {name} has been provided with an invalid type "
+                    f'{type(feature_grouping)} as part of the "features" argument.'
+                )
+
+        self.description = description
+        self.tags = tags or {}
+        self.owner = owner
+        self.created_timestamp = None
+        self.last_updated_timestamp = None
+
+    def __repr__(self):
+        items = (f"{k} = {v}" for k, v in self.__dict__.items())
+        return f"<{self.__class__.__name__}({', '.join(items)})>"
+
+    def __str__(self):
+        return str(MessageToJson(self.to_proto()))
+
+    def __hash__(self):
+        return hash(self.name)
 
     def __eq__(self, other):
-        pass
+        if not isinstance(other, FeatureService):
+            raise TypeError(
+                "Comparisons should only involve FeatureService class objects."
+            )
+
+        if (
+            self.name != other.name
+            or self.description != other.description
+            or self.tags != other.tags
+            or self.owner != other.owner
+        ):
+            return False
+
+        if sorted(self.feature_view_projections) != sorted(
+            other.feature_view_projections
+        ):
+            return False
 
-    @staticmethod
-    def from_proto(feature_service_proto: FeatureServiceProto):
-        fs = FeatureService(
+        return True
+
+    @classmethod
+    def from_proto(cls, feature_service_proto: FeatureServiceProto):
+        """
+        Converts a FeatureServiceProto to a FeatureService object.
+
+        Args:
+            feature_service_proto: A protobuf representation of a FeatureService.
+        """
+        fs = cls(
             name=feature_service_proto.spec.name,
-            features=[
-                FeatureViewProjection.from_proto(fp)
-                for fp in feature_service_proto.spec.features
-            ],
+            features=[],
+            tags=dict(feature_service_proto.spec.tags),
+            description=feature_service_proto.spec.description,
+            owner=feature_service_proto.spec.owner,
+        )
+        fs.feature_view_projections.extend(
+            [
+                FeatureViewProjection.from_proto(projection)
+                for projection in feature_service_proto.spec.features
+            ]
         )
 
         if feature_service_proto.meta.HasField("created_timestamp"):
@@ -58,27 +171,30 @@ def from_proto(feature_service_proto: FeatureServiceProto):
 
         return fs
 
-    def to_proto(self):
-        meta = FeatureServiceMeta()
+    def to_proto(self) -> FeatureServiceProto:
+        """
+        Converts a feature service to its protobuf representation.
+
+        Returns:
+            A FeatureServiceProto protobuf.
+ """ + meta = FeatureServiceMetaProto() if self.created_timestamp: meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) - spec = FeatureServiceSpec() - spec.name = self.name - for definition in self.features: - if isinstance(definition, FeatureTable) or isinstance( - definition, FeatureView - ): - feature_ref = FeatureViewProjection( - definition.name, definition.features - ) - else: - feature_ref = definition - - spec.features.append(feature_ref.to_proto()) + spec = FeatureServiceSpecProto( + name=self.name, + features=[ + projection.to_proto() for projection in self.feature_view_projections + ], + tags=self.tags, + description=self.description, + owner=self.owner, + ) - feature_service_proto = FeatureServiceProto(spec=spec, meta=meta) - return feature_service_proto + return FeatureServiceProto(spec=spec, meta=meta) def validate(self): pass diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 2bcf939cad..4f456be384 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -11,57 +11,119 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import copy +import itertools import os -from collections import Counter, OrderedDict, defaultdict -from datetime import datetime, timedelta +import warnings +from collections import Counter, defaultdict +from datetime import datetime from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, +) import pandas as pd from colorama import Fore, Style +from google.protobuf.timestamp_pb2 import Timestamp from tqdm import tqdm -from feast import utils +from feast import feature_server, flags, flags_helper, utils +from feast.base_feature_view import BaseFeatureView +from feast.data_source import DataSource +from feast.diff.infra_diff import InfraDiff, diff_infra_protos +from feast.diff.registry_diff import RegistryDiff, apply_diff_to_registry, diff_between from feast.entity import Entity -from feast.errors import FeatureNameCollisionError, FeatureViewNotFoundException -from feast.feature_table import FeatureTable -from feast.feature_view import FeatureView +from feast.errors import ( + EntityNotFoundException, + ExperimentalFeatureNotEnabled, + FeatureNameCollisionError, + FeatureViewNotFoundException, + RequestDataNotFoundInEntityDfException, + RequestDataNotFoundInEntityRowsException, +) +from feast.feast_object import FeastObject +from feast.feature_service import FeatureService +from feast.feature_view import ( + DUMMY_ENTITY, + DUMMY_ENTITY_ID, + DUMMY_ENTITY_NAME, + DUMMY_ENTITY_VAL, + FeatureView, +) from feast.inference import ( update_data_sources_with_inferred_event_timestamp_col, update_entities_with_inferred_types_from_feature_views, + update_feature_views_with_inferred_features, ) +from feast.infra.infra_object import Infra from feast.infra.provider import Provider, RetrievalJob, get_provider -from feast.online_response import OnlineResponse, _infer_online_entity_rows +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.online_response import OnlineResponse +from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto from 
feast.protos.feast.serving.ServingService_pb2 import ( - GetOnlineFeaturesRequestV2, + FieldStatus, GetOnlineFeaturesResponse, ) from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value from feast.registry import Registry from feast.repo_config import RepoConfig, load_repo_config -from feast.usage import log_exceptions, log_exceptions_and_usage +from feast.repo_contents import RepoContents +from feast.request_feature_view import RequestFeatureView +from feast.saved_dataset import SavedDataset, SavedDatasetStorage +from feast.type_map import ( + feast_value_type_to_python_type, + python_values_to_proto_values, +) +from feast.usage import log_exceptions, log_exceptions_and_usage, set_usage_attribute +from feast.value_type import ValueType from feast.version import get_version +warnings.simplefilter("once", DeprecationWarning) + +if TYPE_CHECKING: + from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer + class FeatureStore: """ A FeatureStore object is used to define, create, and retrieve features. Args: - repo_path: Path to a `feature_store.yaml` used to configure the feature store - config (RepoConfig): Configuration object used to configure the feature store + repo_path (optional): Path to a `feature_store.yaml` used to configure the + feature store. + config (optional): Configuration object used to configure the feature store. """ config: RepoConfig repo_path: Path _registry: Registry + _provider: Provider + _go_server: Optional["EmbeddedOnlineFeatureServer"] @log_exceptions def __init__( self, repo_path: Optional[str] = None, config: Optional[RepoConfig] = None, ): + """ + Creates a FeatureStore object. + + Raises: + ValueError: If both or neither of repo_path and config are specified. 
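+
+        Example (illustrative):
+
+        >>> fs = FeatureStore(repo_path="feature_repo")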
+ """ if repo_path is not None and config is not None: - raise ValueError("You cannot specify both repo_path and config") + raise ValueError("You cannot specify both repo_path and config.") if config is not None: self.repo_path = Path(os.getcwd()) self.config = config @@ -69,28 +131,32 @@ def __init__( self.repo_path = Path(repo_path) self.config = load_repo_config(Path(repo_path)) else: - raise ValueError("Please specify one of repo_path or config") + raise ValueError("Please specify one of repo_path or config.") registry_config = self.config.get_registry_config() - self._registry = Registry( - registry_path=registry_config.path, - repo_path=self.repo_path, - cache_ttl=timedelta(seconds=registry_config.cache_ttl_seconds), - ) + self._registry = Registry(registry_config, repo_path=self.repo_path) + self._registry._initialize_registry() + self._provider = get_provider(self.config, self.repo_path) + self._go_server = None @log_exceptions def version(self) -> str: - """Returns the version of the current Feast SDK/CLI""" - + """Returns the version of the current Feast SDK/CLI.""" return get_version() + @property + def registry(self) -> Registry: + """Gets the registry of this feature store.""" + return self._registry + @property def project(self) -> str: + """Gets the project of this feature store.""" return self.config.project def _get_provider(self) -> Provider: # TODO: Bake self.repo_path into self.config so that we dont only have one interface to paths - return get_provider(self.config, self.repo_path) + return self._provider @log_exceptions_and_usage def refresh_registry(self): @@ -99,184 +165,633 @@ def refresh_registry(self): Explicitly calling this method allows for direct control of the state of the registry cache. Every time this method is called the complete registry state will be retrieved from the remote registry store backend (e.g., GCS, S3), and the cache timer will be reset. If refresh_registry() is run before get_online_features() - is called, then get_online_feature() will use the cached registry instead of retrieving (and caching) the + is called, then get_online_features() will use the cached registry instead of retrieving (and caching) the registry itself. Additionally, the TTL for the registry cache can be set to infinity (by setting it to 0), which means that refresh_registry() will become the only way to update the cached registry. If the TTL is set to a value greater than 0, then once the cache becomes stale (more time than the TTL has passed), a new cache will be - downloaded synchronously, which may increase latencies if the triggering method is get_online_features() + downloaded synchronously, which may increase latencies if the triggering method is get_online_features(). """ - registry_config = self.config.get_registry_config() - self._registry = Registry( - registry_path=registry_config.path, - repo_path=self.repo_path, - cache_ttl=timedelta(seconds=registry_config.cache_ttl_seconds), - ) - self._registry.refresh() + registry = Registry(registry_config, repo_path=self.repo_path) + registry.refresh() + + self._registry = registry @log_exceptions_and_usage def list_entities(self, allow_cache: bool = False) -> List[Entity]: """ - Retrieve a list of entities from the registry + Retrieves the list of entities from the registry. Args: - allow_cache (bool): Whether to allow returning entities from a cached registry + allow_cache: Whether to allow returning entities from a cached registry. Returns: - List of entities + A list of entities. 
""" + return self._list_entities(allow_cache) - return self._registry.list_entities(self.project, allow_cache=allow_cache) + def _list_entities( + self, allow_cache: bool = False, hide_dummy_entity: bool = True + ) -> List[Entity]: + all_entities = self._registry.list_entities( + self.project, allow_cache=allow_cache + ) + return [ + entity + for entity in all_entities + if entity.name != DUMMY_ENTITY_NAME or not hide_dummy_entity + ] @log_exceptions_and_usage - def list_feature_views(self) -> List[FeatureView]: + def list_feature_services(self) -> List[FeatureService]: """ - Retrieve a list of feature views from the registry + Retrieves the list of feature services from the registry. Returns: - List of feature views + A list of feature services. """ + return self._registry.list_feature_services(self.project) - return self._registry.list_feature_views(self.project) + @log_exceptions_and_usage + def list_feature_views(self, allow_cache: bool = False) -> List[FeatureView]: + """ + Retrieves the list of feature views from the registry. + + Args: + allow_cache: Whether to allow returning entities from a cached registry. + + Returns: + A list of feature views. + """ + return self._list_feature_views(allow_cache) + + @log_exceptions_and_usage + def list_request_feature_views( + self, allow_cache: bool = False + ) -> List[RequestFeatureView]: + """ + Retrieves the list of feature views from the registry. + + Args: + allow_cache: Whether to allow returning entities from a cached registry. + + Returns: + A list of feature views. + """ + return self._registry.list_request_feature_views( + self.project, allow_cache=allow_cache + ) + + def _list_feature_views( + self, allow_cache: bool = False, hide_dummy_entity: bool = True, + ) -> List[FeatureView]: + feature_views = [] + for fv in self._registry.list_feature_views( + self.project, allow_cache=allow_cache + ): + if hide_dummy_entity and fv.entities[0] == DUMMY_ENTITY_NAME: + fv.entities = [] + feature_views.append(fv) + return feature_views + + @log_exceptions_and_usage + def list_on_demand_feature_views( + self, allow_cache: bool = False + ) -> List[OnDemandFeatureView]: + """ + Retrieves the list of on demand feature views from the registry. + + Returns: + A list of on demand feature views. + """ + return self._registry.list_on_demand_feature_views( + self.project, allow_cache=allow_cache + ) + + @log_exceptions_and_usage + def list_data_sources(self, allow_cache: bool = False) -> List[DataSource]: + """ + Retrieves the list of data sources from the registry. + + Args: + allow_cache: Whether to allow returning data sources from a cached registry. + + Returns: + A list of data sources. + """ + return self._registry.list_data_sources(self.project, allow_cache=allow_cache) @log_exceptions_and_usage - def get_entity(self, name: str) -> Entity: + def get_entity(self, name: str, allow_registry_cache: bool = False) -> Entity: """ Retrieves an entity. Args: - name: Name of entity + name: Name of entity. + allow_registry_cache: (Optional) Whether to allow returning this entity from a cached registry Returns: - Returns either the specified entity, or raises an exception if - none is found + The specified entity. + + Raises: + EntityNotFoundException: The entity could not be found. 
""" + return self._registry.get_entity( + name, self.project, allow_cache=allow_registry_cache + ) - return self._registry.get_entity(name, self.project) + @log_exceptions_and_usage + def get_feature_service( + self, name: str, allow_cache: bool = False + ) -> FeatureService: + """ + Retrieves a feature service. + + Args: + name: Name of feature service. + allow_cache: Whether to allow returning feature services from a cached registry. + + Returns: + The specified feature service. + + Raises: + FeatureServiceNotFoundException: The feature service could not be found. + """ + return self._registry.get_feature_service(name, self.project, allow_cache) @log_exceptions_and_usage - def get_feature_view(self, name: str) -> FeatureView: + def get_feature_view( + self, name: str, allow_registry_cache: bool = False + ) -> FeatureView: """ Retrieves a feature view. Args: - name: Name of feature view + name: Name of feature view. + allow_registry_cache: (Optional) Whether to allow returning this entity from a cached registry Returns: - Returns either the specified feature view, or raises an exception if - none is found + The specified feature view. + + Raises: + FeatureViewNotFoundException: The feature view could not be found. """ + return self._get_feature_view(name, allow_registry_cache=allow_registry_cache) - return self._registry.get_feature_view(name, self.project) + def _get_feature_view( + self, + name: str, + hide_dummy_entity: bool = True, + allow_registry_cache: bool = False, + ) -> FeatureView: + feature_view = self._registry.get_feature_view( + name, self.project, allow_cache=allow_registry_cache + ) + if hide_dummy_entity and feature_view.entities[0] == DUMMY_ENTITY_NAME: + feature_view.entities = [] + return feature_view @log_exceptions_and_usage - def delete_feature_view(self, name: str): + def get_on_demand_feature_view(self, name: str) -> OnDemandFeatureView: """ - Deletes a feature view or raises an exception if not found. + Retrieves a feature view. Args: - name: Name of feature view + name: Name of feature view. + + Returns: + The specified feature view. + + Raises: + FeatureViewNotFoundException: The feature view could not be found. """ + return self._registry.get_on_demand_feature_view(name, self.project) + @log_exceptions_and_usage + def get_data_source(self, name: str) -> DataSource: + """ + Retrieves the list of data sources from the registry. + + Args: + name: Name of the data source. + + Returns: + The specified data source. + + Raises: + DataSourceObjectNotFoundException: The data source could not be found. + """ + return self._registry.get_data_source(name, self.project) + + @log_exceptions_and_usage + def delete_feature_view(self, name: str): + """ + Deletes a feature view. + + Args: + name: Name of feature view. + + Raises: + FeatureViewNotFoundException: The feature view could not be found. + """ return self._registry.delete_feature_view(name, self.project) + @log_exceptions_and_usage + def delete_feature_service(self, name: str): + """ + Deletes a feature service. + + Args: + name: Name of feature service. + + Raises: + FeatureServiceNotFoundException: The feature view could not be found. 
+ """ + return self._registry.delete_feature_service(name, self.project) + + def _get_features( + self, features: Union[List[str], FeatureService], allow_cache: bool = False, + ) -> List[str]: + _features = features + + if not _features: + raise ValueError("No features specified for retrieval") + + _feature_refs = [] + if isinstance(_features, FeatureService): + feature_service_from_registry = self.get_feature_service( + _features.name, allow_cache + ) + if feature_service_from_registry != _features: + warnings.warn( + "The FeatureService object that has been passed in as an argument is" + "inconsistent with the version from Registry. Potentially a newer version" + "of the FeatureService has been applied to the registry." + ) + for projection in feature_service_from_registry.feature_view_projections: + _feature_refs.extend( + [ + f"{projection.name_to_use()}:{f.name}" + for f in projection.features + ] + ) + else: + assert isinstance(_features, list) + _feature_refs = _features + return _feature_refs + + def _should_use_plan(self): + """Returns True if _plan and _apply_diffs should be used, False otherwise.""" + # Currently only the local provider with sqlite online store supports _plan and _apply_diffs. + return self.config.provider == "local" and ( + self.config.online_store and self.config.online_store.type == "sqlite" + ) + + def _validate_all_feature_views( + self, + views_to_update: List[FeatureView], + odfvs_to_update: List[OnDemandFeatureView], + request_views_to_update: List[RequestFeatureView], + ): + """Validates all feature views.""" + if ( + not flags_helper.enable_on_demand_feature_views(self.config) + and len(odfvs_to_update) > 0 + ): + raise ExperimentalFeatureNotEnabled(flags.FLAG_ON_DEMAND_TRANSFORM_NAME) + + set_usage_attribute("odfv", bool(odfvs_to_update)) + + _validate_feature_views( + [*views_to_update, *odfvs_to_update, *request_views_to_update] + ) + + def _make_inferences( + self, + data_sources_to_update: List[DataSource], + entities_to_update: List[Entity], + views_to_update: List[FeatureView], + odfvs_to_update: List[OnDemandFeatureView], + ): + """Makes inferences for entities, feature views, and odfvs.""" + update_entities_with_inferred_types_from_feature_views( + entities_to_update, views_to_update, self.config + ) + + update_data_sources_with_inferred_event_timestamp_col( + data_sources_to_update, self.config + ) + + update_data_sources_with_inferred_event_timestamp_col( + [view.batch_source for view in views_to_update], self.config + ) + + # New feature views may reference previously applied entities. + entities = self._list_entities() + update_feature_views_with_inferred_features( + views_to_update, entities + entities_to_update, self.config + ) + + for odfv in odfvs_to_update: + odfv.infer_features() + + @log_exceptions_and_usage + def _plan( + self, desired_repo_contents: RepoContents + ) -> Tuple[RegistryDiff, InfraDiff, Infra]: + """Dry-run registering objects to metadata store. + + The plan method dry-runs registering one or more definitions (e.g., Entity, FeatureView), and produces + a list of all the changes the that would be introduced in the feature repo. The changes computed by the plan + command are for informational purposes, and are not actually applied to the registry. + + Args: + desired_repo_contents: The desired repo state. + + Raises: + ValueError: The 'objects' parameter could not be parsed properly. + + Examples: + Generate a plan adding an Entity and a FeatureView. 
+ + >>> from feast import FeatureStore, Entity, FeatureView, Feature, ValueType, FileSource, RepoConfig + >>> from feast.feature_store import RepoContents + >>> from datetime import timedelta + >>> fs = FeatureStore(repo_path="feature_repo") + >>> driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id") + >>> driver_hourly_stats = FileSource( + ... path="feature_repo/data/driver_stats.parquet", + ... timestamp_field="event_timestamp", + ... created_timestamp_column="created", + ... ) + >>> driver_hourly_stats_view = FeatureView( + ... name="driver_hourly_stats", + ... entities=["driver_id"], + ... ttl=timedelta(seconds=86400 * 1), + ... batch_source=driver_hourly_stats, + ... ) + >>> registry_diff, infra_diff, new_infra = fs._plan(RepoContents( + ... data_sources=[driver_hourly_stats], + ... feature_views=[driver_hourly_stats_view], + ... on_demand_feature_views=list(), + ... request_feature_views=list(), + ... entities=[driver], + ... feature_services=list())) # register entity and feature view + """ + # Validate and run inference on all the objects to be registered. + self._validate_all_feature_views( + desired_repo_contents.feature_views, + desired_repo_contents.on_demand_feature_views, + desired_repo_contents.request_feature_views, + ) + _validate_data_sources(desired_repo_contents.data_sources) + self._make_inferences( + desired_repo_contents.data_sources, + desired_repo_contents.entities, + desired_repo_contents.feature_views, + desired_repo_contents.on_demand_feature_views, + ) + + # Compute the difference between the current objects in the registry and + # the desired repo state. + registry_diff = diff_between( + self._registry, self.project, desired_repo_contents + ) + + # Compute the difference between the current infra, as stored in the registry, + # and the desired infra. + self._registry.refresh() + current_infra_proto = ( + self._registry.cached_registry_proto.infra.__deepcopy__() + if self._registry.cached_registry_proto + else InfraProto() + ) + desired_registry_proto = desired_repo_contents.to_registry_proto() + new_infra = self._provider.plan_infra(self.config, desired_registry_proto) + new_infra_proto = new_infra.to_proto() + infra_diff = diff_infra_protos(current_infra_proto, new_infra_proto) + + return registry_diff, infra_diff, new_infra + + @log_exceptions_and_usage + def _apply_diffs( + self, registry_diff: RegistryDiff, infra_diff: InfraDiff, new_infra: Infra + ): + """Applies the given diffs to the metadata store and infrastructure. + + Args: + registry_diff: The diff between the current registry and the desired registry. + infra_diff: The diff between the current infra and the desired infra. + new_infra: The desired infra. + """ + infra_diff.update() + apply_diff_to_registry( + self._registry, registry_diff, self.project, commit=False + ) + + self._registry.update_infra(new_infra, self.project, commit=True) + @log_exceptions_and_usage def apply( - self, objects: Union[Entity, FeatureView, List[Union[FeatureView, Entity]]] + self, + objects: Union[ + DataSource, + Entity, + FeatureView, + OnDemandFeatureView, + RequestFeatureView, + FeatureService, + List[FeastObject], + ], + objects_to_delete: Optional[List[FeastObject]] = None, + partial: bool = True, ): """Register objects to metadata store and update related infrastructure. The apply method registers one or more definitions (e.g., Entity, FeatureView) and registers or updates these - objects in the Feast registry.
Once the registry has been updated, the apply method will update related - infrastructure (e.g., create tables in an online store) in order to reflect these new definitions. All - operations are idempotent, meaning they can safely be rerun. + objects in the Feast registry. Once the apply method has updated the infrastructure (e.g., create tables in + an online store), it will commit the updated registry. All operations are idempotent, meaning they can safely + be rerun. Args: - objects (List[Union[FeatureView, Entity]]): A list of FeatureView or Entity objects that should be - registered + objects: A single object, or a list of objects that should be registered with the Feature Store. + objects_to_delete: A list of objects to be deleted from the registry and removed from the + provider's infrastructure. This deletion will only be performed if partial is set to False. + partial: If True, apply will only handle the specified objects; if False, apply will also delete + all the objects in objects_to_delete, and tear down any associated cloud resources. + + Raises: + ValueError: The 'objects' parameter could not be parsed properly. Examples: - Register a single Entity and FeatureView. + Register an Entity and a FeatureView. - >>> from feast.feature_store import FeatureStore - >>> from feast import Entity, FeatureView, Feature, ValueType, FileSource + >>> from feast import FeatureStore, Entity, FeatureView, Feature, ValueType, FileSource, RepoConfig >>> from datetime import timedelta - >>> - >>> fs = FeatureStore() - >>> customer_entity = Entity(name="customer", value_type=ValueType.INT64, description="customer entity") - >>> customer_feature_view = FeatureView( - >>> name="customer_fv", - >>> entities=["customer"], - >>> features=[Feature(name="age", dtype=ValueType.INT64)], - >>> input=FileSource(path="file.parquet", event_timestamp_column="timestamp"), - >>> ttl=timedelta(days=1) - >>> ) - >>> fs.apply([customer_entity, customer_feature_view]) + >>> fs = FeatureStore(repo_path="feature_repo") + >>> driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id") + >>> driver_hourly_stats = FileSource( + ... path="feature_repo/data/driver_stats.parquet", + ... timestamp_field="event_timestamp", + ... created_timestamp_column="created", + ... ) + >>> driver_hourly_stats_view = FeatureView( + ... name="driver_hourly_stats", + ... entities=["driver_id"], + ... ttl=timedelta(seconds=86400 * 1), + ... batch_source=driver_hourly_stats, + ... ) + >>> fs.apply([driver_hourly_stats_view, driver]) # register entity and feature view """ - # TODO: Add locking - - if isinstance(objects, Entity) or isinstance(objects, FeatureView): + if not isinstance(objects, Iterable): objects = [objects] assert isinstance(objects, list) - views_to_update = [ob for ob in objects if isinstance(ob, FeatureView)] + if not objects_to_delete: + objects_to_delete = [] + + # Separate all objects into entities, feature services, and different feature view types. 
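+        # (Illustrative: applying [driver_hourly_stats_view, driver] from the example
+        # above buckets the Entity into entities_to_update and the FeatureView into
+        # views_to_update.)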
entities_to_update = [ob for ob in objects if isinstance(ob, Entity)] + views_to_update = [ob for ob in objects if isinstance(ob, FeatureView)] + request_views_to_update = [ + ob for ob in objects if isinstance(ob, RequestFeatureView) + ] + odfvs_to_update = [ob for ob in objects if isinstance(ob, OnDemandFeatureView)] + services_to_update = [ob for ob in objects if isinstance(ob, FeatureService)] + data_sources_set_to_update = { + ob for ob in objects if isinstance(ob, DataSource) + } + + for fv in views_to_update: + data_sources_set_to_update.add(fv.batch_source) + if fv.stream_source: + data_sources_set_to_update.add(fv.stream_source) + + if request_views_to_update: + warnings.warn( + "Request feature view is deprecated. " + "Please use request data source instead", + DeprecationWarning, + ) - # Make inferences - update_entities_with_inferred_types_from_feature_views( - entities_to_update, views_to_update, self.config - ) + for rfv in request_views_to_update: + data_sources_set_to_update.add(rfv.request_data_source) - update_data_sources_with_inferred_event_timestamp_col( - [view.input for view in views_to_update], self.config - ) + for odfv in odfvs_to_update: + for v in odfv.source_request_sources.values(): + data_sources_set_to_update.add(v) + + data_sources_to_update = list(data_sources_set_to_update) - for view in views_to_update: - view.infer_features_from_input_source(self.config) + # Validate all feature views and make inferences. + self._validate_all_feature_views( + views_to_update, odfvs_to_update, request_views_to_update + ) + self._make_inferences( + data_sources_to_update, entities_to_update, views_to_update, odfvs_to_update + ) - if len(views_to_update) + len(entities_to_update) != len(objects): - raise ValueError("Unknown object type provided as part of apply() call") + # Handle all entityless feature views by using DUMMY_ENTITY as a placeholder entity. + entities_to_update.append(DUMMY_ENTITY) - for view in views_to_update: + # Add all objects to the registry and update the provider's infrastructure. + for ds in data_sources_to_update: + self._registry.apply_data_source(ds, project=self.project, commit=False) + for view in itertools.chain( + views_to_update, odfvs_to_update, request_views_to_update + ): self._registry.apply_feature_view(view, project=self.project, commit=False) for ent in entities_to_update: self._registry.apply_entity(ent, project=self.project, commit=False) - self._registry.commit() + for feature_service in services_to_update: + self._registry.apply_feature_service( + feature_service, project=self.project, commit=False + ) + + if not partial: + # Delete all registry objects that should not exist. 
+ entities_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, Entity) + ] + views_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, FeatureView) + ] + request_views_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, RequestFeatureView) + ] + odfvs_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, OnDemandFeatureView) + ] + services_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, FeatureService) + ] + data_sources_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, DataSource) + ] + + for data_source in data_sources_to_delete: + self._registry.delete_data_source( + data_source.name, project=self.project, commit=False + ) + for entity in entities_to_delete: + self._registry.delete_entity( + entity.name, project=self.project, commit=False + ) + for view in views_to_delete: + self._registry.delete_feature_view( + view.name, project=self.project, commit=False + ) + for request_view in request_views_to_delete: + self._registry.delete_feature_view( + request_view.name, project=self.project, commit=False + ) + for odfv in odfvs_to_delete: + self._registry.delete_feature_view( + odfv.name, project=self.project, commit=False + ) + for service in services_to_delete: + self._registry.delete_feature_service( + service.name, project=self.project, commit=False + ) self._get_provider().update_infra( project=self.project, - tables_to_delete=[], + tables_to_delete=views_to_delete if not partial else [], tables_to_keep=views_to_update, - entities_to_delete=[], + entities_to_delete=entities_to_delete if not partial else [], entities_to_keep=entities_to_update, - partial=True, + partial=partial, ) + self._registry.commit() + + # go server needs to be reloaded to apply new configuration. + # we're stopping it here + # new server will be instantiated on the next online request + self._teardown_go_server() + @log_exceptions_and_usage def teardown(self): - tables: List[Union[FeatureView, FeatureTable]] = [] + """Tears down all local and cloud resources for the feature store.""" + tables: List[FeatureView] = [] feature_views = self.list_feature_views() - feature_tables = self._registry.list_feature_tables(self.project) tables.extend(feature_views) - tables.extend(feature_tables) entities = self.list_entities() self._get_provider().teardown_infra(self.project, tables, entities) - for feature_view in feature_views: - self.delete_feature_view(feature_view.name) - for feature_table in feature_tables: - self._registry.delete_feature_table(feature_table.name, self.project) + self._registry.teardown() + self._teardown_go_server() @log_exceptions_and_usage def get_historical_features( self, entity_df: Union[pd.DataFrame, str], - feature_refs: List[str], + features: Union[List[str], FeatureService], full_feature_names: bool = False, ) -> RetrievalJob: """Enrich an entity dataframe with historical feature values for either training or batch scoring. @@ -297,41 +812,101 @@ def get_historical_features( columns (e.g., customer_id, driver_id) on which features need to be joined, as well as a event_timestamp column used to ensure point-in-time correctness. Either a Pandas DataFrame can be provided or a string SQL query. The query must be of a format supported by the configured offline store (e.g., BigQuery) - feature_refs: A list of features that should be retrieved from the offline store. Feature references are of - the format "feature_view:feature", e.g., "customer_fv:daily_transactions". 
- full_feature_names: A boolean that provides the option to add the feature view prefixes to the feature names, - changing them from the format "feature" to "feature_view__feature" (e.g., "daily_transactions" changes to - "customer_fv__daily_transactions"). By default, this value is set to False. + features: The list of features that should be retrieved from the offline store. These features can be + specified either as a list of string feature references or as a feature service. String feature + references must have the format "feature_view:feature", e.g. "customer_fv:daily_transactions". + full_feature_names: If True, feature names will be prefixed with the corresponding feature view name, + changing them from the format "feature" to "feature_view__feature" (e.g. "daily_transactions" + changes to "customer_fv__daily_transactions"). Returns: RetrievalJob which can be used to materialize the results. - Examples: - Retrieve historical features using a BigQuery SQL entity dataframe + Raises: + ValueError: No features were specified for retrieval. - >>> from feast.feature_store import FeatureStore - >>> - >>> fs = FeatureStore(config=RepoConfig(provider="gcp")) + Examples: + Retrieve historical features from a local offline store. + + >>> from feast import FeatureStore, RepoConfig + >>> import pandas as pd + >>> fs = FeatureStore(repo_path="feature_repo") + >>> entity_df = pd.DataFrame.from_dict( + ... { + ... "driver_id": [1001, 1002], + ... "event_timestamp": [ + ... datetime(2021, 4, 12, 10, 59, 42), + ... datetime(2021, 4, 12, 8, 12, 10), + ... ], + ... } + ... ) >>> retrieval_job = fs.get_historical_features( - >>> entity_df="SELECT event_timestamp, order_id, customer_id from gcp_project.my_ds.customer_orders", - >>> feature_refs=["customer:age", "customer:avg_orders_1d", "customer:avg_orders_7d"], - >>> ) + ... entity_df=entity_df, + ... features=[ + ... "driver_hourly_stats:conv_rate", + ... "driver_hourly_stats:acc_rate", + ... "driver_hourly_stats:avg_daily_trips", + ... ], + ... ) >>> feature_data = retrieval_job.to_df() - >>> model.fit(feature_data) # insert your modeling framework here. """ - all_feature_views = self._registry.list_feature_views(project=self.project) + _feature_refs = self._get_features(features) + ( + all_feature_views, + all_request_feature_views, + all_on_demand_feature_views, + ) = self._get_feature_views_to_use(features) + + if all_request_feature_views: + warnings.warn( + "Request feature view is deprecated. " + "Please use request data source instead", + DeprecationWarning, + ) - _validate_feature_refs(feature_refs, full_feature_names) - feature_views = list( - view for view, _ in _group_feature_refs(feature_refs, all_feature_views) + # TODO(achal): _group_feature_refs returns the on demand feature views, but it's not passed into the provider. + # This is a weird interface quirk - we should revisit `get_historical_features` to + # pass in the on demand feature views as well.
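+        # Illustrative sketch of the grouping below (the odfv name is hypothetical):
+        # given ["driver_hourly_stats:conv_rate", "my_odfv:my_derived_feature"],
+        # _group_feature_refs buckets the first ref under its FeatureView and the
+        # second under its OnDemandFeatureView, and separately returns the set of
+        # refs that point at RequestFeatureViews so they can be dropped below.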
+ fvs, odfvs, request_fvs, request_fv_refs = _group_feature_refs( + _feature_refs, + all_feature_views, + all_request_feature_views, + all_on_demand_feature_views, ) + feature_views = list(view for view, _ in fvs) + on_demand_feature_views = list(view for view, _ in odfvs) + request_feature_views = list(view for view, _ in request_fvs) + + set_usage_attribute("odfv", bool(on_demand_feature_views)) + set_usage_attribute("request_fv", bool(request_feature_views)) + + # Check that the right request data is present in the entity_df + if type(entity_df) == pd.DataFrame: + entity_pd_df = cast(pd.DataFrame, entity_df) + for fv in request_feature_views: + for feature in fv.features: + if feature.name not in entity_pd_df.columns: + raise RequestDataNotFoundInEntityDfException( + feature_name=feature.name, feature_view_name=fv.name + ) + for odfv in on_demand_feature_views: + odfv_request_data_schema = odfv.get_request_data_schema() + for feature_name in odfv_request_data_schema.keys(): + if feature_name not in entity_pd_df.columns: + raise RequestDataNotFoundInEntityDfException( + feature_name=feature_name, feature_view_name=odfv.name, + ) + _validate_feature_refs(_feature_refs, full_feature_names) + # Drop refs that refer to RequestFeatureViews since they don't need to be fetched and + # already exist in the entity_df + _feature_refs = [ref for ref in _feature_refs if ref not in request_fv_refs] provider = self._get_provider() job = provider.get_historical_features( self.config, feature_views, - feature_refs, + _feature_refs, entity_df, self._registry, self.project, @@ -340,6 +915,96 @@ def get_historical_features( return job + @log_exceptions_and_usage + def create_saved_dataset( + self, + from_: RetrievalJob, + name: str, + storage: SavedDatasetStorage, + tags: Optional[Dict[str, str]] = None, + feature_service: Optional[FeatureService] = None, + ) -> SavedDataset: + """ + Executes the provided retrieval job and persists its outcome in the given storage. + The storage type (e.g., BigQuery or Redshift) must be the same as the globally configured offline store. + After the data has been successfully persisted, a saved dataset object with the dataset metadata is committed to the registry. + The name of the saved dataset should be unique within a project, since it's possible to overwrite a previously stored dataset + with the same name. + + Returns: + SavedDataset object with the attached RetrievalJob + + Raises: + ValueError: The given retrieval job doesn't have metadata. + """ + warnings.warn( + "Saving dataset is an experimental feature. " + "This API is unstable and will most probably be changed in the future. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + + if not from_.metadata: + raise ValueError( + "RetrievalJob must contain metadata. 
" + "Use RetrievalJob produced by get_historical_features" + ) + + dataset = SavedDataset( + name=name, + features=from_.metadata.features, + join_keys=from_.metadata.keys, + full_feature_names=from_.full_feature_names, + storage=storage, + tags=tags, + feature_service_name=feature_service.name if feature_service else None, + ) + + dataset.min_event_timestamp = from_.metadata.min_event_timestamp + dataset.max_event_timestamp = from_.metadata.max_event_timestamp + + from_.persist(storage) + + dataset = dataset.with_retrieval_job( + self._get_provider().retrieve_saved_dataset( + config=self.config, dataset=dataset + ) + ) + + self._registry.apply_saved_dataset(dataset, self.project, commit=True) + return dataset + + @log_exceptions_and_usage + def get_saved_dataset(self, name: str) -> SavedDataset: + """ + Find a saved dataset in the registry by provided name and + create a retrieval job to pull whole dataset from storage (offline store). + + If dataset couldn't be found by provided name SavedDatasetNotFound exception will be raised. + + Data will be retrieved from globally configured offline store. + + Returns: + SavedDataset with RetrievalJob attached + + Raises: + SavedDatasetNotFound + """ + warnings.warn( + "Retrieving datasets is an experimental feature. " + "This API is unstable and it could and most probably will be changed in the future. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + + dataset = self._registry.get_saved_dataset(name, self.project) + provider = self._get_provider() + + retrieval_job = provider.retrieve_saved_dataset( + config=self.config, dataset=dataset + ) + return dataset.with_retrieval_job(retrieval_job) + @log_exceptions_and_usage def materialize_incremental( self, end_date: datetime, feature_views: Optional[List[str]] = None, @@ -358,24 +1023,35 @@ def materialize_incremental( feature_views (List[str]): Optional list of feature view names. If selected, will only run materialization for the specified feature views. + Raises: + Exception: A feature view being materialized does not have a TTL set. + Examples: Materialize all features into the online store up to 5 minutes ago. + >>> from feast import FeatureStore, RepoConfig >>> from datetime import datetime, timedelta - >>> from feast.feature_store import FeatureStore - >>> - >>> fs = FeatureStore(config=RepoConfig(provider="gcp", registry="gs://my-fs/", project="my_fs_proj")) + >>> fs = FeatureStore(repo_path="feature_repo") >>> fs.materialize_incremental(end_date=datetime.utcnow() - timedelta(minutes=5)) + Materializing... + + ... """ - - feature_views_to_materialize = [] + feature_views_to_materialize: List[FeatureView] = [] if feature_views is None: - feature_views_to_materialize = self._registry.list_feature_views( - self.project + feature_views_to_materialize = self._list_feature_views( + hide_dummy_entity=False ) + feature_views_to_materialize = [ + fv for fv in feature_views_to_materialize if fv.online + ] else: for name in feature_views: - feature_view = self._registry.get_feature_view(name, self.project) + feature_view = self._get_feature_view(name, hide_dummy_entity=False) + if not feature_view.online: + raise ValueError( + f"FeatureView {feature_view.name} is not configured to be served online." 
+ ) feature_views_to_materialize.append(feature_view) _print_materialization_log( @@ -418,7 +1094,7 @@ def tqdm_builder(length): ) self._registry.apply_materialization( - feature_view, self.project, start_date, end_date + feature_view, self.project, start_date, end_date, ) @log_exceptions_and_usage @@ -445,28 +1121,36 @@ def materialize( Materialize all features into the online store over the interval from 3 hours ago to 10 minutes ago. + >>> from feast import FeatureStore, RepoConfig >>> from datetime import datetime, timedelta - >>> from feast.feature_store import FeatureStore - >>> - >>> fs = FeatureStore(config=RepoConfig(provider="gcp")) + >>> fs = FeatureStore(repo_path="feature_repo") >>> fs.materialize( - >>> start_date=datetime.utcnow() - timedelta(hours=3), end_date=datetime.utcnow() - timedelta(minutes=10) - >>> ) + ... start_date=datetime.utcnow() - timedelta(hours=3), end_date=datetime.utcnow() - timedelta(minutes=10) + ... ) + Materializing... + + ... """ - if utils.make_tzaware(start_date) > utils.make_tzaware(end_date): raise ValueError( f"The given start_date {start_date} is greater than the given end_date {end_date}." ) - feature_views_to_materialize = [] + feature_views_to_materialize: List[FeatureView] = [] if feature_views is None: - feature_views_to_materialize = self._registry.list_feature_views( - self.project + feature_views_to_materialize = self._list_feature_views( + hide_dummy_entity=False ) + feature_views_to_materialize = [ + fv for fv in feature_views_to_materialize if fv.online + ] else: for name in feature_views: - feature_view = self._registry.get_feature_view(name, self.project) + feature_view = self._get_feature_view(name, hide_dummy_entity=False) + if not feature_view.online: + raise ValueError( + f"FeatureView {feature_view.name} is not configured to be served online." + ) feature_views_to_materialize.append(feature_view) _print_materialization_log( @@ -497,13 +1181,71 @@ def tqdm_builder(length): ) self._registry.apply_materialization( - feature_view, self.project, start_date, end_date + feature_view, self.project, start_date, end_date, ) + @log_exceptions_and_usage + def push( + self, push_source_name: str, df: pd.DataFrame, allow_registry_cache: bool = True + ): + """ + Push features to a push source. This updates all the feature views that have the push source as their stream source. + + Args: + push_source_name: The name of the push source we want to push data to. + df: The data being pushed. + allow_registry_cache: Whether to allow cached versions of the registry. + """ + warnings.warn( + "Push source is an experimental feature. " + "This API is unstable and it might change in the future. 
" + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + from feast.data_source import PushSource + + all_fvs = self.list_feature_views(allow_cache=allow_registry_cache) + + fvs_with_push_sources = { + fv + for fv in all_fvs + if ( + fv.stream_source is not None + and isinstance(fv.stream_source, PushSource) + and fv.stream_source.name == push_source_name + ) + } + + for fv in fvs_with_push_sources: + self.write_to_online_store( + fv.name, df, allow_registry_cache=allow_registry_cache + ) + + @log_exceptions_and_usage + def write_to_online_store( + self, + feature_view_name: str, + df: pd.DataFrame, + allow_registry_cache: bool = True, + ): + """ + ingests data directly into the Online store + """ + # TODO: restrict this to work with online StreamFeatureViews and validate the FeatureView type + feature_view = self.get_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache + ) + entities = [] + for entity_name in feature_view.entities: + entities.append( + self.get_entity(entity_name, allow_registry_cache=allow_registry_cache) + ) + provider = self._get_provider() + provider.ingest_df(feature_view, entities, df) + @log_exceptions_and_usage def get_online_features( self, - feature_refs: List[str], + features: Union[List[str], FeatureService], entity_rows: List[Dict[str, Any]], full_feature_names: bool = False, ) -> OnlineResponse: @@ -519,121 +1261,742 @@ def get_online_features( infinity (cache forever). Args: - feature_refs: List of feature references that will be returned for each entity. - Each feature reference should have the following format: - "feature_table:feature" where "feature_table" & "feature" refer to - the feature and feature table names respectively. - Only the feature name is required. + features: The list of features that should be retrieved from the online store. These features can be + specified either as a list of string feature references or as a feature service. String feature + references must have format "feature_view:feature", e.g. "customer_fv:daily_transactions". entity_rows: A list of dictionaries where each key-value is an entity-name, entity-value pair. + full_feature_names: If True, feature names will be prefixed with the corresponding feature view name, + changing them from the format "feature" to "feature_view__feature" (e.g. "daily_transactions" + changes to "customer_fv__daily_transactions"). + Returns: OnlineResponse containing the feature data in records. + + Raises: + Exception: No entity with the specified name exists. + Examples: - >>> from feast import FeatureStore - >>> - >>> store = FeatureStore(repo_path="...") - >>> feature_refs = ["sales:daily_transactions"] - >>> entity_rows = [{"customer_id": 0},{"customer_id": 1}] - >>> - >>> online_response = store.get_online_features( - >>> feature_refs, entity_rows) + Retrieve online features from an online store. + + >>> from feast import FeatureStore, RepoConfig + >>> fs = FeatureStore(repo_path="feature_repo") + >>> online_response = fs.get_online_features( + ... features=[ + ... "driver_hourly_stats:conv_rate", + ... "driver_hourly_stats:acc_rate", + ... "driver_hourly_stats:avg_daily_trips", + ... ], + ... entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}, {"driver_id": 1003}, {"driver_id": 1004}], + ... 
) >>> online_response_dict = online_response.to_dict() - >>> print(online_response_dict) - {'sales:daily_transactions': [1.1,1.2], 'sales:customer_id': [0,1]} """ + columnar: Dict[str, List[Any]] = {k: [] for k in entity_rows[0].keys()} + for entity_row in entity_rows: + for key, value in entity_row.items(): + try: + columnar[key].append(value) + except KeyError as e: + raise ValueError("All entity_rows must have the same keys.") from e + + return self._get_online_features( + features=features, + entity_values=columnar, + full_feature_names=full_feature_names, + native_entity_values=True, + ) + + def _get_online_features( + self, + features: Union[List[str], FeatureService], + entity_values: Mapping[ + str, Union[Sequence[Any], Sequence[Value], RepeatedValue] + ], + full_feature_names: bool = False, + native_entity_values: bool = True, + ): + # Extract Sequence from RepeatedValue Protobuf. + entity_value_lists: Dict[str, Union[List[Any], List[Value]]] = { + k: list(v) if isinstance(v, Sequence) else list(v.val) + for k, v in entity_values.items() + } + + # If Go feature server is enabled, send request to it instead of going through regular Python logic + if self.config.go_feature_retrieval: + from feast.embedded_go.online_features_service import ( + EmbeddedOnlineFeatureServer, + ) + + # Lazily start the go server on the first request + if self._go_server is None: + self._go_server = EmbeddedOnlineFeatureServer( + str(self.repo_path.absolute()), self.config, self + ) + + entity_native_values: Dict[str, List[Any]] + if not native_entity_values: + # Convert proto types to native types since Go feature server currently + # only handles native types. + # TODO(felixwang9817): Remove this logic once native types are supported. + entity_native_values = { + k: [ + feast_value_type_to_python_type(proto_value) + for proto_value in v + ] + for k, v in entity_value_lists.items() + } + else: + entity_native_values = entity_value_lists + + return self._go_server.get_online_features( + features_refs=features if isinstance(features, list) else [], + feature_service=features + if isinstance(features, FeatureService) + else None, + entities=entity_native_values, + request_data={}, # TODO: add request data parameter to public API + full_feature_names=full_feature_names, + ) + + _feature_refs = self._get_features(features, allow_cache=True) + ( + requested_feature_views, + requested_request_feature_views, + requested_on_demand_feature_views, + ) = self._get_feature_views_to_use( + features=features, allow_cache=True, hide_dummy_entity=False + ) + + if requested_request_feature_views: + warnings.warn( + "Request feature view is deprecated. " + "Please use request data source instead", + DeprecationWarning, + ) + + ( + entity_name_to_join_key_map, + entity_type_map, + join_keys_set, + ) = self._get_entity_maps(requested_feature_views) + + entity_proto_values: Dict[str, List[Value]] + if native_entity_values: + # Convert values to Protobuf once. 
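+            # (Illustrative, assuming driver_id is an INT64 entity:
+            # {"driver_id": [1001, 1002]} becomes
+            # {"driver_id": [Value(int64_val=1001), Value(int64_val=1002)]}.)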
+ entity_proto_values = { + k: python_values_to_proto_values( + v, entity_type_map.get(k, ValueType.UNKNOWN) + ) + for k, v in entity_value_lists.items() + } + else: + entity_proto_values = entity_value_lists + + num_rows = _validate_entity_values(entity_proto_values) + _validate_feature_refs(_feature_refs, full_feature_names) + ( + grouped_refs, + grouped_odfv_refs, + grouped_request_fv_refs, + _, + ) = _group_feature_refs( + _feature_refs, + requested_feature_views, + requested_request_feature_views, + requested_on_demand_feature_views, + ) + set_usage_attribute("odfv", bool(grouped_odfv_refs)) + set_usage_attribute("request_fv", bool(grouped_request_fv_refs)) + + # All requested features should be present in the result. + requested_result_row_names = { + feat_ref.replace(":", "__") for feat_ref in _feature_refs + } + if not full_feature_names: + requested_result_row_names = { + name.rpartition("__")[-1] for name in requested_result_row_names + } + + feature_views = list(view for view, _ in grouped_refs) + + needed_request_data, needed_request_fv_features = self.get_needed_request_data( + grouped_odfv_refs, grouped_request_fv_refs + ) + + join_key_values: Dict[str, List[Value]] = {} + request_data_features: Dict[str, List[Value]] = {} + # Entity rows may be either entities or request data. + for join_key_or_entity_name, values in entity_proto_values.items(): + # Found request data + if ( + join_key_or_entity_name in needed_request_data + or join_key_or_entity_name in needed_request_fv_features + ): + if join_key_or_entity_name in needed_request_fv_features: + # If the data was requested as a feature then + # make sure it appears in the result. + requested_result_row_names.add(join_key_or_entity_name) + request_data_features[join_key_or_entity_name] = values + else: + if join_key_or_entity_name in join_keys_set: + join_key = join_key_or_entity_name + else: + try: + join_key = entity_name_to_join_key_map[join_key_or_entity_name] + except KeyError: + raise EntityNotFoundException( + join_key_or_entity_name, self.project + ) + else: + warnings.warn( + "Using entity name is deprecated. Use join_key instead." + ) + + # All join keys should be returned in the result. + requested_result_row_names.add(join_key) + join_key_values[join_key] = values + + self.ensure_request_data_values_exist( + needed_request_data, needed_request_fv_features, request_data_features + ) + + # Populate online features response proto with join keys and request data features + online_features_response = GetOnlineFeaturesResponse(results=[]) + self._populate_result_rows_from_columnar( + online_features_response=online_features_response, + data=dict(**join_key_values, **request_data_features), + ) + + # Add the Entityless case after populating result rows to avoid having to remove + # it later. + entityless_case = DUMMY_ENTITY_NAME in [ + entity_name + for feature_view in feature_views + for entity_name in feature_view.entities + ] + if entityless_case: + join_key_values[DUMMY_ENTITY_ID] = python_values_to_proto_values( + [DUMMY_ENTITY_VAL] * num_rows, DUMMY_ENTITY.value_type + ) provider = self._get_provider() - entities = self.list_entities(allow_cache=True) - entity_name_to_join_key_map = {} + for table, requested_features in grouped_refs: + # Get the correct set of entity values with the correct join keys. + table_entity_values, idxs = self._get_unique_entities( + table, join_key_values, entity_name_to_join_key_map, + ) + + # Fetch feature data for the minimum set of Entities. 
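+            # (If several entity rows reference the same entity key, the online
+            # store is read once for that key and the result is fanned back out
+            # to the original row positions via the returned indexes.)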
+ feature_data = self._read_from_online_store( + table_entity_values, provider, requested_features, table, + ) + + # Populate the result_rows with the Features from the OnlineStore inplace. + self._populate_response_from_feature_data( + feature_data, + idxs, + online_features_response, + full_feature_names, + requested_features, + table, + ) + + if grouped_odfv_refs: + self._augment_response_with_on_demand_transforms( + online_features_response, + _feature_refs, + requested_on_demand_feature_views, + full_feature_names, + ) + + self._drop_unneeded_columns( + online_features_response, requested_result_row_names + ) + return OnlineResponse(online_features_response) + + @staticmethod + def _get_columnar_entity_values( + rowise: Optional[List[Dict[str, Any]]], columnar: Optional[Dict[str, List[Any]]] + ) -> Dict[str, List[Any]]: + if (rowise is None and columnar is None) or ( + rowise is not None and columnar is not None + ): + raise ValueError( + "Exactly one of `columnar_entity_values` and `rowise_entity_values` must be set." + ) + + if rowise is not None: + # Convert entity_rows from rowise to columnar. + res = defaultdict(list) + for entity_row in rowise: + for key, value in entity_row.items(): + res[key].append(value) + return res + return cast(Dict[str, List[Any]], columnar) + + def _get_entity_maps( + self, feature_views + ) -> Tuple[Dict[str, str], Dict[str, ValueType], Set[str]]: + entities = self._list_entities(allow_cache=True, hide_dummy_entity=False) + entity_name_to_join_key_map: Dict[str, str] = {} + entity_type_map: Dict[str, ValueType] = {} for entity in entities: entity_name_to_join_key_map[entity.name] = entity.join_key + entity_type_map[entity.name] = entity.value_type + for feature_view in feature_views: + for entity_name in feature_view.entities: + entity = self._registry.get_entity( + entity_name, self.project, allow_cache=True + ) + # User directly uses join_key as the entity reference in the entity_rows for the + # entity mapping case. + entity_name = feature_view.projection.join_key_map.get( + entity.join_key, entity.name + ) + join_key = feature_view.projection.join_key_map.get( + entity.join_key, entity.join_key + ) + entity_name_to_join_key_map[entity_name] = join_key + entity_type_map[join_key] = entity.value_type + return ( + entity_name_to_join_key_map, + entity_type_map, + set(entity_name_to_join_key_map.values()), + ) - join_key_rows = [] - for row in entity_rows: - join_key_row = {} - for entity_name, entity_value in row.items(): - try: - join_key = entity_name_to_join_key_map[entity_name] - except KeyError: - raise Exception( - f"Entity {entity_name} does not exist in project {self.project}" - ) - join_key_row[join_key] = entity_value - join_key_rows.append(join_key_row) + @staticmethod + def _get_table_entity_values( + table: FeatureView, + entity_name_to_join_key_map: Dict[str, str], + join_key_proto_values: Dict[str, List[Value]], + ) -> Dict[str, List[Value]]: + # The correct join_keys expected by the OnlineStore for this Feature View. + table_join_keys = [ + entity_name_to_join_key_map[entity_name] for entity_name in table.entities + ] - entity_row_proto_list = _infer_online_entity_rows(join_key_rows) + # If the FeatureView has a Projection then the join keys may be aliased. + alias_to_join_key_map = {v: k for k, v in table.projection.join_key_map.items()} + + # Subset to columns which are relevant to this FeatureView and + # give them the correct names. 
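+        # (Illustrative: with a projection join_key_map of {"driver_id": "d_id"},
+        # an incoming "d_id" column is renamed back to "driver_id" before the
+        # online store lookup.)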
+ entity_values = { + alias_to_join_key_map.get(k, k): v + for k, v in join_key_proto_values.items() + if alias_to_join_key_map.get(k, k) in table_join_keys + } + return entity_values + + @staticmethod + def _populate_result_rows_from_columnar( + online_features_response: GetOnlineFeaturesResponse, + data: Dict[str, List[Value]], + ): + timestamp = Timestamp() # Only initialize this timestamp once. + # Add more values to the existing result rows + for feature_name, feature_values in data.items(): + online_features_response.metadata.feature_names.val.append(feature_name) + online_features_response.results.append( + GetOnlineFeaturesResponse.FeatureVector( + values=feature_values, + statuses=[FieldStatus.PRESENT] * len(feature_values), + event_timestamps=[timestamp] * len(feature_values), + ) + ) - union_of_entity_keys = [] - result_rows: List[GetOnlineFeaturesResponse.FieldValues] = [] + @staticmethod + def get_needed_request_data( + grouped_odfv_refs: List[Tuple[OnDemandFeatureView, List[str]]], + grouped_request_fv_refs: List[Tuple[RequestFeatureView, List[str]]], + ) -> Tuple[Set[str], Set[str]]: + needed_request_data: Set[str] = set() + needed_request_fv_features: Set[str] = set() + for odfv, _ in grouped_odfv_refs: + odfv_request_data_schema = odfv.get_request_data_schema() + needed_request_data.update(odfv_request_data_schema.keys()) + for request_fv, _ in grouped_request_fv_refs: + for feature in request_fv.features: + needed_request_fv_features.add(feature.name) + return needed_request_data, needed_request_fv_features + + @staticmethod + def ensure_request_data_values_exist( + needed_request_data: Set[str], + needed_request_fv_features: Set[str], + request_data_features: Dict[str, List[Any]], + ): + if len(needed_request_data) + len(needed_request_fv_features) != len( + request_data_features.keys() + ): + missing_features = [ + x + for x in itertools.chain( + needed_request_data, needed_request_fv_features + ) + if x not in request_data_features + ] + raise RequestDataNotFoundInEntityRowsException( + feature_names=missing_features + ) - for entity_row_proto in entity_row_proto_list: - union_of_entity_keys.append(_entity_row_to_key(entity_row_proto)) - result_rows.append(_entity_row_to_field_values(entity_row_proto)) + def _get_unique_entities( + self, + table: FeatureView, + join_key_values: Dict[str, List[Value]], + entity_name_to_join_key_map: Dict[str, str], + ) -> Tuple[Tuple[Dict[str, Value], ...], Tuple[List[int], ...]]: + """Return the set of unique composite Entities for a Feature View and the indexes at which they appear. + + This method allows us to query the OnlineStore for data we need only once + rather than requesting and processing data for the same combination of + Entities multiple times. + """ + # Get the correct set of entity values with the correct join keys. + table_entity_values = self._get_table_entity_values( + table, entity_name_to_join_key_map, join_key_values, + ) - all_feature_views = self._registry.list_feature_views( - project=self.project, allow_cache=True + # Convert back to rowise. + keys = table_entity_values.keys() + # Sort the rowise data to allow for grouping but keep original index. This lambda is + # sufficient as Entity types cannot be complex (ie. lists). 
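+        # (Illustrative with plain ints: values [(1001,), (1002,), (1001,)]
+        # sort to [(0, (1001,)), (2, (1001,)), (1, (1002,))], letting groupby
+        # collapse the two 1001 rows while remembering indexes 0 and 2.)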
+ rowise = list(enumerate(zip(*table_entity_values.values()))) + rowise.sort( + key=lambda row: tuple(getattr(x, x.WhichOneof("val")) for x in row[1]) ) - _validate_feature_refs(feature_refs, full_feature_names) - grouped_refs = _group_feature_refs(feature_refs, all_feature_views) - for table, requested_features in grouped_refs: - entity_keys = _get_table_entity_keys( - table, union_of_entity_keys, entity_name_to_join_key_map + # Identify unique entities and the indexes at which they occur. + unique_entities: Tuple[Dict[str, Value], ...] + indexes: Tuple[List[int], ...] + unique_entities, indexes = tuple( + zip( + *[ + (dict(zip(keys, k)), [_[0] for _ in g]) + for k, g in itertools.groupby(rowise, key=lambda x: x[1]) + ] ) - read_rows = provider.online_read( - config=self.config, - table=table, - entity_keys=entity_keys, - requested_features=requested_features, - ) - for row_idx, read_row in enumerate(read_rows): - row_ts, feature_data = read_row - result_row = result_rows[row_idx] - - if feature_data is None: - for feature_name in requested_features: - feature_ref = ( - f"{table.name}__{feature_name}" - if full_feature_names - else feature_name - ) - result_row.statuses[ - feature_ref - ] = GetOnlineFeaturesResponse.FieldStatus.NOT_FOUND + ) + return unique_entities, indexes + + def _read_from_online_store( + self, + entity_rows: Iterable[Mapping[str, Value]], + provider: Provider, + requested_features: List[str], + table: FeatureView, + ) -> List[Tuple[List[Timestamp], List["FieldStatus.ValueType"], List[Value]]]: + """Read and process data from the OnlineStore for a given FeatureView. + + This method guarantees that the order of the data in each element of the + List returned is the same as the order of `requested_features`. + + This method assumes that `provider.online_read` returns data for each + combination of Entities in `entity_rows` in the same order as they + are provided. + """ + # Instantiate one EntityKeyProto per Entity. + entity_key_protos = [ + EntityKeyProto(join_keys=row.keys(), entity_values=row.values()) + for row in entity_rows + ] + + # Fetch data for Entities. + read_rows = provider.online_read( + config=self.config, + table=table, + entity_keys=entity_key_protos, + requested_features=requested_features, + ) + + # Each row is a set of features for a given entity key. We only need to convert + # the data to Protobuf once. + null_value = Value() + read_row_protos = [] + for read_row in read_rows: + row_ts_proto = Timestamp() + row_ts, feature_data = read_row + # TODO (Ly): reuse whatever timestamp if row_ts is None? + if row_ts is not None: + row_ts_proto.FromDatetime(row_ts) + event_timestamps = [row_ts_proto] * len(requested_features) + if feature_data is None: + statuses = [FieldStatus.NOT_FOUND] * len(requested_features) + values = [null_value] * len(requested_features) + else: + statuses = [] + values = [] + for feature_name in requested_features: + # Make sure order of data is the same as requested_features. 
+ if feature_name not in feature_data: + statuses.append(FieldStatus.NOT_FOUND) + values.append(null_value) + else: + statuses.append(FieldStatus.PRESENT) + values.append(feature_data[feature_name]) + read_row_protos.append((event_timestamps, statuses, values)) + return read_row_protos + + @staticmethod + def _populate_response_from_feature_data( + feature_data: Iterable[ + Tuple[ + Iterable[Timestamp], Iterable["FieldStatus.ValueType"], Iterable[Value] + ] + ], + indexes: Iterable[List[int]], + online_features_response: GetOnlineFeaturesResponse, + full_feature_names: bool, + requested_features: Iterable[str], + table: FeatureView, + ): + """Populate the GetOnlineFeaturesResponse with feature data. + + This method assumes that `_read_from_online_store` returns data for each + combination of Entities in `entity_rows` in the same order as they + are provided. + + Args: + feature_data: A list of data in Protobuf form which was retrieved from the OnlineStore. + indexes: A list of indexes which should be the same length as `feature_data`. Each list + of indexes corresponds to a set of result rows in `online_features_response`. + online_features_response: The object to populate. + full_feature_names: A boolean that provides the option to add the feature view prefixes to the feature names, + changing them from the format "feature" to "feature_view__feature" (e.g., "daily_transactions" changes to + "customer_fv__daily_transactions"). + requested_features: The names of the features in `feature_data`. This should be ordered in the same way as the + data in `feature_data`. + table: The FeatureView that `feature_data` was retrieved from. + """ + # Add the feature names to the response. + requested_feature_refs = [ + f"{table.projection.name_to_use()}__{feature_name}" + if full_feature_names + else feature_name + for feature_name in requested_features + ] + online_features_response.metadata.feature_names.val.extend( + requested_feature_refs + ) + + timestamps, statuses, values = zip(*feature_data) + + # Populate the result with data fetched from the OnlineStore + # which is guaranteed to be aligned with `requested_features`. + for ( + feature_idx, + (timestamp_vector, statuses_vector, values_vector), + ) in enumerate(zip(zip(*timestamps), zip(*statuses), zip(*values))): + online_features_response.results.append( + GetOnlineFeaturesResponse.FeatureVector( + values=apply_list_mapping(values_vector, indexes), + statuses=apply_list_mapping(statuses_vector, indexes), + event_timestamps=apply_list_mapping(timestamp_vector, indexes), + ) + ) + + @staticmethod + def _augment_response_with_on_demand_transforms( + online_features_response: GetOnlineFeaturesResponse, + feature_refs: List[str], + requested_on_demand_feature_views: List[OnDemandFeatureView], + full_feature_names: bool, + ): + """Computes on demand feature values and adds them to the result rows. + + Assumes that 'online_features_response' already contains the necessary request data and input feature + views for the on demand feature views. Unneeded feature values such as request data and + unrequested input feature views will be removed from 'online_features_response'. + + Args: + online_features_response: Protobuf object to populate + feature_refs: List of all feature references to be returned. + requested_on_demand_feature_views: List of all odfvs that have been requested. 
+ full_feature_names: A boolean that provides the option to add the feature view prefixes to the feature names, + changing them from the format "feature" to "feature_view__feature" (e.g., "daily_transactions" changes to + "customer_fv__daily_transactions"). + """ + requested_odfv_map = { + odfv.name: odfv for odfv in requested_on_demand_feature_views + } + requested_odfv_feature_names = requested_odfv_map.keys() + + odfv_feature_refs = defaultdict(list) + for feature_ref in feature_refs: + view_name, feature_name = feature_ref.split(":") + if view_name in requested_odfv_feature_names: + odfv_feature_refs[view_name].append( + f"{requested_odfv_map[view_name].projection.name_to_use()}__{feature_name}" + if full_feature_names + else feature_name + ) + + initial_response = OnlineResponse(online_features_response) + initial_response_df = initial_response.to_df() + + # Apply on demand transformations and augment the result rows + odfv_result_names = set() + for odfv_name, _feature_refs in odfv_feature_refs.items(): + odfv = requested_odfv_map[odfv_name] + transformed_features_df = odfv.get_transformed_features_df( + initial_response_df, full_feature_names, + ) + selected_subset = [ + f for f in transformed_features_df.columns if f in _feature_refs + ] + + proto_values = [ + python_values_to_proto_values( + transformed_features_df[feature].values, ValueType.UNKNOWN + ) + for feature in selected_subset + ] + + odfv_result_names |= set(selected_subset) + + online_features_response.metadata.feature_names.val.extend(selected_subset) + for feature_idx in range(len(selected_subset)): + online_features_response.results.append( + GetOnlineFeaturesResponse.FeatureVector( + values=proto_values[feature_idx], + statuses=[FieldStatus.PRESENT] * len(proto_values[feature_idx]), + event_timestamps=[Timestamp()] * len(proto_values[feature_idx]), + ) + ) + + @staticmethod + def _drop_unneeded_columns( + online_features_response: GetOnlineFeaturesResponse, + requested_result_row_names: Set[str], + ): + """ + Unneeded feature values such as request data and unrequested input feature views will + be removed from 'online_features_response'. + + Args: + online_features_response: Protobuf object to populate + requested_result_row_names: Fields from 'result_rows' that have been requested, and + therefore should not be dropped. 
+ """ + # Drop values that aren't needed + unneeded_feature_indices = [ + idx + for idx, val in enumerate( + online_features_response.metadata.feature_names.val + ) + if val not in requested_result_row_names + ] + + for idx in reversed(unneeded_feature_indices): + del online_features_response.metadata.feature_names.val[idx] + del online_features_response.results[idx] + + def _get_feature_views_to_use( + self, + features: Optional[Union[List[str], FeatureService]], + allow_cache=False, + hide_dummy_entity: bool = True, + ) -> Tuple[List[FeatureView], List[RequestFeatureView], List[OnDemandFeatureView]]: + + fvs = { + fv.name: fv + for fv in self._list_feature_views(allow_cache, hide_dummy_entity) + } + + request_fvs = { + fv.name: fv + for fv in self._registry.list_request_feature_views( + project=self.project, allow_cache=allow_cache + ) + } + + od_fvs = { + fv.name: fv + for fv in self._registry.list_on_demand_feature_views( + project=self.project, allow_cache=allow_cache + ) + } + + if isinstance(features, FeatureService): + fvs_to_use, request_fvs_to_use, od_fvs_to_use = [], [], [] + for fv_name, projection in [ + (projection.name, projection) + for projection in features.feature_view_projections + ]: + if fv_name in fvs: + fvs_to_use.append( + fvs[fv_name].with_projection(copy.copy(projection)) + ) + elif fv_name in request_fvs: + request_fvs_to_use.append( + request_fvs[fv_name].with_projection(copy.copy(projection)) + ) + elif fv_name in od_fvs: + odfv = od_fvs[fv_name].with_projection(copy.copy(projection)) + od_fvs_to_use.append(odfv) + # Let's make sure to include an FVs which the ODFV requires Features from. + for projection in odfv.source_feature_view_projections.values(): + fv = fvs[projection.name].with_projection(copy.copy(projection)) + if fv not in fvs_to_use: + fvs_to_use.append(fv) else: - for feature_name in feature_data: - feature_ref = ( - f"{table.name}__{feature_name}" - if full_feature_names - else feature_name - ) - if feature_name in requested_features: - result_row.fields[feature_ref].CopyFrom( - feature_data[feature_name] - ) - result_row.statuses[ - feature_ref - ] = GetOnlineFeaturesResponse.FieldStatus.PRESENT + raise ValueError( + f"The provided feature service {features.name} contains a reference to a feature view" + f"{fv_name} which doesn't exist. Please make sure that you have created the feature view" + f'{fv_name} and that you have registered it by running "apply".' 
+ ) + views_to_use = (fvs_to_use, request_fvs_to_use, od_fvs_to_use) + else: + views_to_use = ( + [*fvs.values()], + [*request_fvs.values()], + [*od_fvs.values()], + ) - return OnlineResponse(GetOnlineFeaturesResponse(field_values=result_rows)) + return views_to_use + + @log_exceptions_and_usage + def serve(self, host: str, port: int, no_access_log: bool) -> None: + """Start the feature consumption server locally on a given port.""" + feature_server.start_server(self, host, port, no_access_log) + + @log_exceptions_and_usage + def get_feature_server_endpoint(self) -> Optional[str]: + """Returns endpoint for the feature server, if it exists.""" + return self._provider.get_feature_server_endpoint() + + @log_exceptions_and_usage + def serve_transformations(self, port: int) -> None: + """Start the feature transformation server locally on a given port.""" + if not flags_helper.enable_on_demand_feature_views(self.config): + raise ExperimentalFeatureNotEnabled(flags.FLAG_ON_DEMAND_TRANSFORM_NAME) + from feast import transformation_server -def _entity_row_to_key(row: GetOnlineFeaturesRequestV2.EntityRow) -> EntityKeyProto: - names, values = zip(*row.fields.items()) - return EntityKeyProto(join_keys=names, entity_values=values) # type: ignore + transformation_server.start_server(self, port) + def _teardown_go_server(self): + self._go_server = None -def _entity_row_to_field_values( - row: GetOnlineFeaturesRequestV2.EntityRow, -) -> GetOnlineFeaturesResponse.FieldValues: - result = GetOnlineFeaturesResponse.FieldValues() - for k in row.fields: - result.fields[k].CopyFrom(row.fields[k]) - result.statuses[k] = GetOnlineFeaturesResponse.FieldStatus.PRESENT - return result +def _validate_entity_values(join_key_values: Dict[str, List[Value]]): + set_of_row_lengths = {len(v) for v in join_key_values.values()} + if len(set_of_row_lengths) > 1: + raise ValueError("All entity rows must have the same columns.") + return set_of_row_lengths.pop() def _validate_feature_refs(feature_refs: List[str], full_feature_names: bool = False): + """ + Validates that there are no collisions among the feature references. + + Args: + feature_refs: List of feature references to validate. Feature references must have format + "feature_view:feature", e.g. "customer_fv:daily_transactions". + full_feature_names: If True, the full feature references are compared for collisions; if False, + only the feature names are compared. + + Raises: + FeatureNameCollisionError: There is a collision among the feature references. 
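+
+    Example of a collision (the merchant_fv name is hypothetical): with
+    full_feature_names=False, the refs "customer_fv:daily_transactions" and
+    "merchant_fv:daily_transactions" collide, since both would be surfaced
+    under the bare feature name "daily_transactions".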
+ """ collided_feature_refs = [] if full_feature_names: @@ -658,63 +2021,68 @@ def _validate_feature_refs(feature_refs: List[str], full_feature_names: bool = F def _group_feature_refs( - feature_refs: List[str], all_feature_views: List[FeatureView] -) -> List[Tuple[FeatureView, List[str]]]: - """ Get list of feature views and corresponding feature names based on feature references""" + features: List[str], + all_feature_views: List[FeatureView], + all_request_feature_views: List[RequestFeatureView], + all_on_demand_feature_views: List[OnDemandFeatureView], +) -> Tuple[ + List[Tuple[FeatureView, List[str]]], + List[Tuple[OnDemandFeatureView, List[str]]], + List[Tuple[RequestFeatureView, List[str]]], + Set[str], +]: + """Get list of feature views and corresponding feature names based on feature references""" # view name to view proto - view_index = {view.name: view for view in all_feature_views} + view_index = {view.projection.name_to_use(): view for view in all_feature_views} + + # request view name to proto + request_view_index = { + view.projection.name_to_use(): view for view in all_request_feature_views + } + + # on demand view to on demand view proto + on_demand_view_index = { + view.projection.name_to_use(): view for view in all_on_demand_feature_views + } # view name to feature names - views_features = defaultdict(list) + views_features = defaultdict(set) + request_views_features = defaultdict(set) + request_view_refs = set() - for ref in feature_refs: - view_name, feat_name = ref.split(":") + # on demand view name to feature names + on_demand_view_features = defaultdict(set) - if view_name not in view_index: + for ref in features: + view_name, feat_name = ref.split(":") + if view_name in view_index: + views_features[view_name].add(feat_name) + elif view_name in on_demand_view_index: + on_demand_view_features[view_name].add(feat_name) + # Let's also add in any FV Feature dependencies here. + for input_fv_projection in on_demand_view_index[ + view_name + ].source_feature_view_projections.values(): + for input_feat in input_fv_projection.features: + views_features[input_fv_projection.name].add(input_feat.name) + elif view_name in request_view_index: + request_views_features[view_name].add(feat_name) + request_view_refs.add(ref) + else: raise FeatureViewNotFoundException(view_name) - views_features[view_name].append(feat_name) - result = [] + fvs_result: List[Tuple[FeatureView, List[str]]] = [] + odfvs_result: List[Tuple[OnDemandFeatureView, List[str]]] = [] + request_fvs_result: List[Tuple[RequestFeatureView, List[str]]] = [] + for view_name, feature_names in views_features.items(): - result.append((view_index[view_name], feature_names)) - return result - - -def _get_table_entity_keys( - table: FeatureView, entity_keys: List[EntityKeyProto], join_key_map: Dict[str, str], -) -> List[EntityKeyProto]: - table_join_keys = [join_key_map[entity_name] for entity_name in table.entities] - required_entities = OrderedDict.fromkeys(sorted(table_join_keys)) - entity_key_protos = [] - for entity_key in entity_keys: - required_entities_to_values = required_entities.copy() - for i in range(len(entity_key.join_keys)): - entity_name = entity_key.join_keys[i] - entity_value = entity_key.entity_values[i] - - if entity_name in required_entities_to_values: - if required_entities_to_values[entity_name] is not None: - raise ValueError( - f"Duplicate entity keys detected. Table {table.name} expects {table_join_keys}. 
The entity " - f"{entity_name} was provided at least twice" - ) - required_entities_to_values[entity_name] = entity_value - - entity_names = [] - entity_values = [] - for entity_name, entity_value in required_entities_to_values.items(): - if entity_value is None: - raise ValueError( - f"Table {table.name} expects entity field {table_join_keys}. No entity value was found for " - f"{entity_name}" - ) - entity_names.append(entity_name) - entity_values.append(entity_value) - entity_key_protos.append( - EntityKeyProto(join_keys=entity_names, entity_values=entity_values) - ) - return entity_key_protos + fvs_result.append((view_index[view_name], list(feature_names))) + for view_name, feature_names in request_views_features.items(): + request_fvs_result.append((request_view_index[view_name], list(feature_names))) + for view_name, feature_names in on_demand_view_features.items(): + odfvs_result.append((on_demand_view_index[view_name], list(feature_names))) + return fvs_result, odfvs_result, request_fvs_result, request_view_refs def _print_materialization_log( @@ -733,3 +2101,48 @@ def _print_materialization_log( f" to {Style.BRIGHT + Fore.GREEN}{end_date.replace(microsecond=0).astimezone()}{Style.RESET_ALL}" f" into the {Style.BRIGHT + Fore.GREEN}{online_store}{Style.RESET_ALL} online store.\n" ) + + +def _validate_feature_views(feature_views: List[BaseFeatureView]): + """Verify feature views have case-insensitively unique names""" + fv_names = set() + for fv in feature_views: + case_insensitive_fv_name = fv.name.lower() + if case_insensitive_fv_name in fv_names: + raise ValueError( + f"More than one feature view with name {case_insensitive_fv_name} found. " + f"Please ensure that all feature view names are case-insensitively unique. " + f"It may be necessary to ignore certain files in your feature repository by using a .feastignore file." + ) + else: + fv_names.add(case_insensitive_fv_name) + + +def _validate_data_sources(data_sources: List[DataSource]): + """Verify data sources have case-insensitively unique names""" + ds_names = set() + for ds in data_sources: + case_insensitive_ds_name = ds.name.lower() + if case_insensitive_ds_name in ds_names: + if case_insensitive_ds_name.strip(): + warnings.warn( + f"More than one data source with name {case_insensitive_ds_name} found. " + f"Please ensure that all data source names are case-insensitively unique. " + f"It may be necessary to ignore certain files in your feature repository by using a .feastignore " + f"file. Starting in Feast 0.21, unique names (perhaps inferred from the table name) will be " + f"required in data sources to encourage data source discovery" + ) + else: + ds_names.add(case_insensitive_ds_name) + + +def apply_list_mapping( + lst: Iterable[Any], mapping_indexes: Iterable[List[int]] +) -> Iterable[Any]: + output_len = sum(len(item) for item in mapping_indexes) + output = [None] * output_len + for elem, destinations in zip(lst, mapping_indexes): + for idx in destinations: + output[idx] = elem + + return output diff --git a/sdk/python/feast/feature_table.py b/sdk/python/feast/feature_table.py deleted file mode 100644 index 2b09fea8bd..0000000000 --- a/sdk/python/feast/feature_table.py +++ /dev/null @@ -1,411 +0,0 @@ -# Copyright 2020 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Dict, List, MutableMapping, Optional, Union - -import yaml -from google.protobuf import json_format -from google.protobuf.duration_pb2 import Duration -from google.protobuf.json_format import MessageToDict, MessageToJson -from google.protobuf.timestamp_pb2 import Timestamp - -from feast.data_source import DataSource, KafkaSource, KinesisSource -from feast.feature import Feature -from feast.loaders import yaml as feast_yaml -from feast.protos.feast.core.FeatureTable_pb2 import FeatureTable as FeatureTableProto -from feast.protos.feast.core.FeatureTable_pb2 import ( - FeatureTableMeta as FeatureTableMetaProto, -) -from feast.protos.feast.core.FeatureTable_pb2 import ( - FeatureTableSpec as FeatureTableSpecProto, -) -from feast.value_type import ValueType - - -class FeatureTable: - """ - Represents a collection of features and associated metadata. - """ - - def __init__( - self, - name: str, - entities: List[str], - features: List[Feature], - batch_source: DataSource = None, - stream_source: Optional[Union[KafkaSource, KinesisSource]] = None, - max_age: Optional[Duration] = None, - labels: Optional[MutableMapping[str, str]] = None, - ): - self._name = name - self._entities = entities - self._features = features - self._batch_source = batch_source - self._stream_source = stream_source - - self._labels: MutableMapping[str, str] - if labels is None: - self._labels = dict() - else: - self._labels = labels - - self._max_age = max_age - self._created_timestamp: Optional[Timestamp] = None - self._last_updated_timestamp: Optional[Timestamp] = None - - def __str__(self): - return str(MessageToJson(self.to_proto())) - - def __eq__(self, other): - if not isinstance(other, FeatureTable): - raise TypeError( - "Comparisons should only involve FeatureTable class objects." 
- ) - - if ( - self.labels != other.labels - or self.name != other.name - or self.max_age != other.max_age - ): - return False - - if sorted(self.entities) != sorted(other.entities): - return False - if sorted(self.features) != sorted(other.features): - return False - if self.batch_source != other.batch_source: - return False - if self.stream_source != other.stream_source: - return False - - return True - - @property - def name(self): - """ - Returns the name of this feature table - """ - return self._name - - @name.setter - def name(self, name: str): - """ - Sets the name of this feature table - """ - self._name = name - - @property - def entities(self): - """ - Returns the entities of this feature table - """ - return self._entities - - @entities.setter - def entities(self, entities: List[str]): - """ - Sets the entities of this feature table - """ - self._entities = entities - - @property - def features(self): - """ - Returns the features of this feature table - """ - return self._features - - @features.setter - def features(self, features: List[Feature]): - """ - Sets the features of this feature table - """ - self._features = features - - @property - def batch_source(self): - """ - Returns the batch source of this feature table - """ - return self._batch_source - - @batch_source.setter - def batch_source(self, batch_source: DataSource): - """ - Sets the batch source of this feature table - """ - self._batch_source = batch_source - - @property - def stream_source(self): - """ - Returns the stream source of this feature table - """ - return self._stream_source - - @stream_source.setter - def stream_source(self, stream_source: Union[KafkaSource, KinesisSource]): - """ - Sets the stream source of this feature table - """ - self._stream_source = stream_source - - @property - def max_age(self): - """ - Returns the maximum age of this feature table. This is the total maximum - amount of staleness that will be allowed during feature retrieval for - each specific feature that is looked up. - """ - return self._max_age - - @max_age.setter - def max_age(self, max_age: Duration): - """ - Set the maximum age for this feature table - """ - self._max_age = max_age - - @property - def labels(self): - """ - Returns the labels of this feature table. This is the user defined metadata - defined as a dictionary. - """ - return self._labels - - @labels.setter - def labels(self, labels: MutableMapping[str, str]): - """ - Set the labels for this feature table - """ - self._labels = labels - - @property - def created_timestamp(self): - """ - Returns the created_timestamp of this feature table - """ - return self._created_timestamp - - @property - def last_updated_timestamp(self): - """ - Returns the last_updated_timestamp of this feature table - """ - return self._last_updated_timestamp - - def add_feature(self, feature: Feature): - """ - Adds a new feature to the feature table. - """ - self.features.append(feature) - - def is_valid(self): - """ - Validates the state of a feature table locally. Raises an exception - if feature table is invalid. 
- """ - - if not self.name: - raise ValueError("No name found in feature table.") - - if not self.entities: - raise ValueError("No entities found in feature table {self.name}.") - - @classmethod - def from_yaml(cls, yml: str): - """ - Creates a feature table from a YAML string body or a file path - - Args: - yml: Either a file path containing a yaml file or a YAML string - - Returns: - Returns a FeatureTable object based on the YAML file - """ - - return cls.from_dict(feast_yaml.yaml_loader(yml, load_single=True)) - - @classmethod - def from_dict(cls, ft_dict): - """ - Creates a feature table from a dict - - Args: - ft_dict: A dict representation of a feature table - - Returns: - Returns a FeatureTable object based on the feature table dict - """ - - feature_table_proto = json_format.ParseDict( - ft_dict, FeatureTableProto(), ignore_unknown_fields=True - ) - - return cls.from_proto(feature_table_proto) - - @classmethod - def from_proto(cls, feature_table_proto: FeatureTableProto): - """ - Creates a feature table from a protobuf representation of a feature table - - Args: - feature_table_proto: A protobuf representation of a feature table - - Returns: - Returns a FeatureTableProto object based on the feature table protobuf - """ - - feature_table = cls( - name=feature_table_proto.spec.name, - entities=[entity for entity in feature_table_proto.spec.entities], - features=[ - Feature( - name=feature.name, - dtype=ValueType(feature.value_type), - labels=feature.labels, - ) - for feature in feature_table_proto.spec.features - ], - labels=feature_table_proto.spec.labels, - max_age=( - None - if feature_table_proto.spec.max_age.seconds == 0 - and feature_table_proto.spec.max_age.nanos == 0 - else feature_table_proto.spec.max_age - ), - batch_source=DataSource.from_proto(feature_table_proto.spec.batch_source), - stream_source=( - None - if not feature_table_proto.spec.stream_source.ByteSize() - else DataSource.from_proto(feature_table_proto.spec.stream_source) - ), - ) - - feature_table._created_timestamp = feature_table_proto.meta.created_timestamp - - return feature_table - - def to_proto(self) -> FeatureTableProto: - """ - Converts an feature table object to its protobuf representation - - Returns: - FeatureTableProto protobuf - """ - - meta = FeatureTableMetaProto( - created_timestamp=self.created_timestamp, - last_updated_timestamp=self.last_updated_timestamp, - ) - - batch_source_proto = self.batch_source.to_proto() - batch_source_proto.data_source_class_type = f"{self.batch_source.__class__.__module__}.{self.batch_source.__class__.__name__}" - - stream_source_proto = None - if self.stream_source: - stream_source_proto = self.stream_source.to_proto() - stream_source_proto.data_source_class_type = f"{self.stream_source.__class__.__module__}.{self.stream_source.__class__.__name__}" - - spec = FeatureTableSpecProto( - name=self.name, - entities=self.entities, - features=[ - feature.to_proto() if type(feature) == Feature else feature - for feature in self.features - ], - labels=self.labels, - max_age=self.max_age, - batch_source=batch_source_proto, - stream_source=stream_source_proto, - ) - - return FeatureTableProto(spec=spec, meta=meta) - - def to_spec_proto(self) -> FeatureTableSpecProto: - """ - Converts an FeatureTableProto object to its protobuf representation. - Used when passing FeatureTableSpecProto object to Feast request. 
- - Returns: - FeatureTableSpecProto protobuf - """ - - spec = FeatureTableSpecProto( - name=self.name, - entities=self.entities, - features=[ - feature.to_proto() if type(feature) == Feature else feature - for feature in self.features - ], - labels=self.labels, - max_age=self.max_age, - batch_source=( - self.batch_source.to_proto() - if issubclass(type(self.batch_source), DataSource) - else self.batch_source - ), - stream_source=( - self.stream_source.to_proto() - if issubclass(type(self.stream_source), DataSource) - else self.stream_source - ), - ) - - return spec - - def to_dict(self) -> Dict: - """ - Converts feature table to dict - - :return: Dictionary object representation of feature table - """ - feature_table_dict = MessageToDict(self.to_proto()) - - # Remove meta when empty for more readable exports - if feature_table_dict["meta"] == {}: - del feature_table_dict["meta"] - - return feature_table_dict - - def to_yaml(self): - """ - Converts a feature table to a YAML string. - - :return: Feature table string returned in YAML format - """ - feature_table_dict = self.to_dict() - return yaml.dump(feature_table_dict, allow_unicode=True, sort_keys=False) - - def _update_from_feature_table(self, feature_table): - """ - Deep replaces one feature table with another - - Args: - feature_table: Feature table to use as a source of configuration - """ - - self.name = feature_table.name - self.entities = feature_table.entities - self.features = feature_table.features - self.labels = feature_table.labels - self.max_age = feature_table.max_age - self.batch_source = feature_table.batch_source - self.stream_source = feature_table.stream_source - self._created_timestamp = feature_table.created_timestamp - self._last_updated_timestamp = feature_table.last_updated_timestamp - - def __repr__(self): - return f"FeatureTable <{self.name}>" diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index b1c8a47902..7060870780 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -11,19 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import re +import copy +import warnings from datetime import datetime, timedelta -from typing import Dict, List, Optional, Tuple, Union +from typing import Dict, List, Optional, Tuple, Type, Union from google.protobuf.duration_pb2 import Duration -from google.protobuf.json_format import MessageToJson -from google.protobuf.timestamp_pb2 import Timestamp from feast import utils -from feast.data_source import DataSource -from feast.errors import RegistryInferenceFailure +from feast.base_feature_view import BaseFeatureView +from feast.data_source import DataSource, KafkaSource, KinesisSource, PushSource +from feast.entity import Entity from feast.feature import Feature from feast.feature_view_projection import FeatureViewProjection +from feast.field import Field from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto from feast.protos.feast.core.FeatureView_pb2 import ( FeatureViewMeta as FeatureViewMetaProto, @@ -34,92 +35,257 @@ from feast.protos.feast.core.FeatureView_pb2 import ( MaterializationInterval as MaterializationIntervalProto, ) -from feast.repo_config import RepoConfig from feast.usage import log_exceptions from feast.value_type import ValueType +warnings.simplefilter("once", DeprecationWarning) -class FeatureView: +# DUMMY_ENTITY is a placeholder entity used in entityless FeatureViews +DUMMY_ENTITY_ID = "__dummy_id" +DUMMY_ENTITY_NAME = "__dummy" +DUMMY_ENTITY_VAL = "" +DUMMY_ENTITY = Entity( + name=DUMMY_ENTITY_NAME, join_keys=[DUMMY_ENTITY_ID], value_type=ValueType.STRING, +) + + +class FeatureView(BaseFeatureView): """ - A FeatureView defines a logical grouping of serveable features. + A FeatureView defines a logical group of features. + + Attributes: + name: The unique name of the feature view. + entities: The list of entities with which this group of features is associated. + ttl: The amount of time this group of features lives. A ttl of 0 indicates that + this group of features lives forever. Note that large ttl's or a ttl of 0 + can result in extremely computationally intensive queries. + batch_source (optional): The batch source of data where this group of features + is stored. This is optional ONLY if a push source is specified as the + stream_source, since push sources contain their own batch sources. This is deprecated in favor of `source`. + stream_source (optional): The stream source of data where this group of features + is stored. This is deprecated in favor of `source`. + schema: The schema of the feature view, including feature, timestamp, and entity + columns. + features: The list of features defined as part of this feature view. Each + feature should also be included in the schema. + online: A boolean indicating whether online retrieval is enabled for this feature + view. + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the feature view, typically the email of the primary + maintainer. + source (optional): The source of data for this group of features. May be a stream source, or a batch source. + If a stream source, the source should contain a batch_source for backfills & batch materialization. 
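+
+    Example:
+        A minimal sketch of defining a feature view. The entity, field, and
+        file names here are hypothetical, and assume that `Field`, `Float32`,
+        and `FileSource` have been imported from feast:
+
+            driver_stats_fv = FeatureView(
+                name="driver_hourly_stats",
+                entities=["driver"],
+                ttl=timedelta(days=1),
+                schema=[Field(name="conv_rate", dtype=Float32)],
+                source=FileSource(
+                    path="data/driver_stats.parquet",
+                    timestamp_field="event_timestamp",
+                ),
+            )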
""" name: str entities: List[str] - features: List[Feature] - tags: Optional[Dict[str, str]] ttl: Optional[timedelta] + batch_source: DataSource + stream_source: Optional[DataSource] + schema: List[Field] + features: List[Field] online: bool - input: DataSource - batch_source: Optional[DataSource] = None - stream_source: Optional[DataSource] = None - created_timestamp: Optional[Timestamp] = None - last_updated_timestamp: Optional[Timestamp] = None + description: str + tags: Dict[str, str] + owner: str materialization_intervals: List[Tuple[datetime, datetime]] + source: Optional[DataSource] @log_exceptions def __init__( self, - name: str, - entities: List[str], - ttl: Optional[Union[Duration, timedelta]], - input: DataSource, + *args, + name: Optional[str] = None, + entities: Optional[Union[List[Entity], List[str]]] = None, + ttl: Optional[Union[Duration, timedelta]] = None, batch_source: Optional[DataSource] = None, stream_source: Optional[DataSource] = None, - features: List[Feature] = None, + features: Optional[List[Feature]] = None, tags: Optional[Dict[str, str]] = None, online: bool = True, + description: str = "", + owner: str = "", + schema: Optional[List[Field]] = None, + source: Optional[DataSource] = None, ): - _input = input or batch_source - assert _input is not None + """ + Creates a FeatureView object. + + Args: + name: The unique name of the feature view. + entities: The list of entities with which this group of features is associated. + ttl: The amount of time this group of features lives. A ttl of 0 indicates that + this group of features lives forever. Note that large ttl's or a ttl of 0 + can result in extremely computationally intensive queries. + batch_source: The batch source of data where this group of features is stored. + stream_source (optional): The stream source of data where this group of features + is stored. + features (deprecated): The list of features defined as part of this feature view. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + online (optional): A boolean indicating whether online retrieval is enabled for + this feature view. + description (optional): A human-readable description. + owner (optional): The owner of the feature view, typically the email of the + primary maintainer. + schema (optional): The schema of the feature view, including feature, timestamp, + and entity columns. + source (optional): The source of data for this group of features. May be a stream source, or a batch source. + If a stream source, the source should contain a batch_source for backfills & batch materialization. + + Raises: + ValueError: A field mapping conflicts with an Entity or a Feature. + """ - _features = features or [] + positional_attributes = ["name", "entities", "ttl"] - cols = [entity for entity in entities] + [feat.name for feat in _features] - for col in cols: - if _input.field_mapping is not None and col in _input.field_mapping.keys(): + _name = name + _entities = entities + _ttl = ttl + + if args: + warnings.warn( + ( + "feature view parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct feature views" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): raise ValueError( - f"The field {col} is mapped to {_input.field_mapping[col]} for this data source. " - f"Please either remove this field mapping or use {_input.field_mapping[col]} as the " - f"Entity or Feature name." 
+ f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"feature views, for backwards compatibility." ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _entities = args[1] + if len(args) >= 3: + _ttl = args[2] + + if not _name: + raise ValueError("feature view name needs to be specified") + + self.name = _name + self.entities = ( + [e.name if isinstance(e, Entity) else e for e in _entities] + if _entities + else [DUMMY_ENTITY_NAME] + ) - self.name = name - self.entities = entities - self.features = _features - self.tags = tags if tags is not None else {} + self._initialize_sources(_name, batch_source, stream_source, source) - if isinstance(ttl, Duration): - self.ttl = timedelta(seconds=int(ttl.seconds)) + if isinstance(_ttl, Duration): + self.ttl = timedelta(seconds=int(_ttl.seconds)) + warnings.warn( + ( + "The option to pass a Duration object to the ttl parameter is being deprecated. " + "Please pass a timedelta object instead. Feast 0.21 and onwards will not support " + "Duration objects." + ), + DeprecationWarning, + ) + elif isinstance(_ttl, timedelta) or _ttl is None: + self.ttl = _ttl else: - self.ttl = ttl + raise ValueError(f"unknown value type specified for ttl {type(_ttl)}") - self.online = online - self.input = _input - self.batch_source = _input - self.stream_source = stream_source + if features is not None: + warnings.warn( + ( + "The `features` parameter is being deprecated in favor of the `schema` parameter. " + "Please switch from using `features` to `schema`. This will also requiring switching " + "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "support the `features` parameter." + ), + DeprecationWarning, + ) - self.materialization_intervals = [] + _schema = schema or [] + if len(_schema) == 0 and features is not None: + _schema = [Field.from_feature(feature) for feature in features] + self.schema = _schema - def __repr__(self): - items = (f"{k} = {v}" for k, v in self.__dict__.items()) - return f"<{self.__class__.__name__}({', '.join(items)})>" + # TODO(felixwang9817): Infer which fields in the schema are features, timestamps, + # and entities. For right now we assume that all fields are features, since the + # current `features` parameter only accepts feature columns. + _features = _schema - def __str__(self): - return str(MessageToJson(self.to_proto())) + cols = [entity for entity in self.entities] + [ + field.name for field in _features + ] + for col in cols: + if ( + self.batch_source.field_mapping is not None + and col in self.batch_source.field_mapping.keys() + ): + raise ValueError( + f"The field {col} is mapped to {self.batch_source.field_mapping[col]} for this data source. " + f"Please either remove this field mapping or use {self.batch_source.field_mapping[col]} as the " + f"Entity or Feature name." 
+                )
 
-    def __hash__(self):
-        return hash(self.name)
+        super().__init__(
+            name=_name,
+            features=_features,
+            description=description,
+            tags=tags,
+            owner=owner,
+        )
+        self.online = online
+        self.materialization_intervals = []
 
-    def __getitem__(self, item) -> FeatureViewProjection:
-        assert isinstance(item, list)
+    def _initialize_sources(self, name, batch_source, stream_source, source):
+        if source:
+            if (
+                isinstance(source, PushSource)
+                or isinstance(source, KafkaSource)
+                or isinstance(source, KinesisSource)
+            ):
+                self.stream_source = source
+                if not source.batch_source:
+                    raise ValueError(
+                        f"A batch_source needs to be specified for stream source `{source.name}`"
+                    )
+                else:
+                    self.batch_source = source.batch_source
+            else:
+                self.stream_source = stream_source
+                self.batch_source = source
+        else:
+            warnings.warn(
+                "batch_source and stream_source have been deprecated in favor of `source`. "
+                "The deprecated fields will be removed in Feast 0.23.",
+                DeprecationWarning,
+            )
+            if stream_source is not None and isinstance(stream_source, PushSource):
+                self.stream_source = stream_source
+                self.batch_source = stream_source.batch_source
+            else:
+                if batch_source is None:
+                    raise ValueError(
+                        f"A batch_source needs to be specified for feature view `{name}`"
+                    )
+                self.stream_source = stream_source
+                self.batch_source = batch_source
+        self.source = source
 
-        referenced_features = []
-        for feature in self.features:
-            if feature.name in item:
-                referenced_features.append(feature)
+    def __hash__(self):
+        return super().__hash__()
 
-        return FeatureViewProjection(self.name, referenced_features)
+    def __copy__(self):
+        fv = FeatureView(
+            name=self.name,
+            entities=self.entities,
+            ttl=self.ttl,
+            source=self.batch_source,
+            stream_source=self.stream_source,
+            schema=self.schema,
+            tags=self.tags,
+            online=self.online,
+        )
+        fv.projection = copy.copy(self.projection)
+        return fv
 
     def __eq__(self, other):
         if not isinstance(other, FeatureView):
@@ -127,50 +293,84 @@ def __eq__(self, other):
             "Comparisons should only involve FeatureView class objects."
             )
 
+        if not super().__eq__(other):
+            return False
+
         if (
-            self.tags != other.tags
-            or self.name != other.name
+            sorted(self.entities) != sorted(other.entities)
             or self.ttl != other.ttl
             or self.online != other.online
+            or self.batch_source != other.batch_source
+            or self.stream_source != other.stream_source
+            or self.schema != other.schema
         ):
             return False
 
-        if sorted(self.entities) != sorted(other.entities):
-            return False
-        if sorted(self.features) != sorted(other.features):
-            return False
-        if self.input != other.input:
-            return False
-        if self.stream_source != other.stream_source:
-            return False
-
         return True
 
-    def is_valid(self):
-        """
-        Validates the state of a feature view locally. Raises an exception
-        if feature view is invalid.
+    def ensure_valid(self):
         """
+        Validates the state of this feature view locally.
 
-        if not self.name:
-            raise ValueError("Feature view needs a name")
+        Raises:
+            ValueError: The feature view does not have a name or does not have entities.
+        """
+        super().ensure_valid()
 
         if not self.entities:
-            raise ValueError("Feature view has no entities")
+            raise ValueError("Feature view has no entities.")
+
+    @property
+    def proto_class(self) -> Type[FeatureViewProto]:
+        return FeatureViewProto
+
+    def with_join_key_map(self, join_key_map: Dict[str, str]):
+        """
+        Returns a copy of this feature view with the join key map set to the given map.
+ This join_key mapping operation is only used as part of query operations and will + not modify the underlying FeatureView. + + Args: + join_key_map: A map of join keys in which the left is the join_key that + corresponds with the feature data and the right corresponds with the entity data. + + Examples: + Join a location feature data table to both the origin column and destination + column of the entity data. + + temperatures_feature_service = FeatureService( + name="temperatures", + features=[ + location_stats_feature_view + .with_name("origin_stats") + .with_join_key_map( + {"location_id": "origin_id"} + ), + location_stats_feature_view + .with_name("destination_stats") + .with_join_key_map( + {"location_id": "destination_id"} + ), + ], + ) + """ + cp = self.__copy__() + cp.projection.join_key_map = join_key_map + + return cp def to_proto(self) -> FeatureViewProto: """ - Converts an feature view object to its protobuf representation. + Converts a feature view object to its protobuf representation. Returns: - FeatureViewProto protobuf + A FeatureViewProto protobuf. """ - - meta = FeatureViewMetaProto( - created_timestamp=self.created_timestamp, - last_updated_timestamp=self.last_updated_timestamp, - materialization_intervals=[], - ) + meta = FeatureViewMetaProto(materialization_intervals=[]) + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) for interval in self.materialization_intervals: interval_proto = MaterializationIntervalProto() interval_proto.start_time.FromDatetime(interval[0]) @@ -182,10 +382,8 @@ def to_proto(self) -> FeatureViewProto: ttl_duration = Duration() ttl_duration.FromTimedelta(self.ttl) - batch_source_proto = self.input.to_proto() - batch_source_proto.data_source_class_type = ( - f"{self.input.__class__.__module__}.{self.input.__class__.__name__}" - ) + batch_source_proto = self.batch_source.to_proto() + batch_source_proto.data_source_class_type = f"{self.batch_source.__class__.__module__}.{self.batch_source.__class__.__name__}" stream_source_proto = None if self.stream_source: @@ -195,8 +393,10 @@ def to_proto(self) -> FeatureViewProto: spec = FeatureViewSpecProto( name=self.name, entities=self.entities, - features=[feature.to_proto() for feature in self.features], + features=[field.to_proto() for field in self.schema], + description=self.description, tags=self.tags, + owner=self.owner, ttl=(ttl_duration if ttl_duration is not None else None), online=self.online, batch_source=batch_source_proto, @@ -208,16 +408,15 @@ def to_proto(self) -> FeatureViewProto: @classmethod def from_proto(cls, feature_view_proto: FeatureViewProto): """ - Creates a feature view from a protobuf representation of a feature view + Creates a feature view from a protobuf representation of a feature view. Args: - feature_view_proto: A protobuf representation of a feature view + feature_view_proto: A protobuf representation of a feature view. Returns: - Returns a FeatureViewProto object based on the feature view protobuf + A FeatureViewProto object based on the feature view protobuf. 
""" - - _input = DataSource.from_proto(feature_view_proto.spec.batch_source) + batch_source = DataSource.from_proto(feature_view_proto.spec.batch_source) stream_source = ( DataSource.from_proto(feature_view_proto.spec.stream_source) if feature_view_proto.spec.HasField("stream_source") @@ -226,28 +425,35 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): feature_view = cls( name=feature_view_proto.spec.name, entities=[entity for entity in feature_view_proto.spec.entities], - features=[ - Feature( - name=feature.name, - dtype=ValueType(feature.value_type), - labels=feature.labels, - ) - for feature in feature_view_proto.spec.features + schema=[ + Field.from_proto(field_proto) + for field_proto in feature_view_proto.spec.features ], + description=feature_view_proto.spec.description, tags=dict(feature_view_proto.spec.tags), + owner=feature_view_proto.spec.owner, online=feature_view_proto.spec.online, ttl=( - None - if feature_view_proto.spec.ttl.seconds == 0 - and feature_view_proto.spec.ttl.nanos == 0 - else feature_view_proto.spec.ttl + timedelta(days=0) + if feature_view_proto.spec.ttl.ToNanoseconds() == 0 + else feature_view_proto.spec.ttl.ToTimedelta() ), - input=_input, - batch_source=_input, + source=batch_source, stream_source=stream_source, ) - feature_view.created_timestamp = feature_view_proto.meta.created_timestamp + # FeatureViewProjections are not saved in the FeatureView proto. + # Create the default projection. + feature_view.projection = FeatureViewProjection.from_definition(feature_view) + + if feature_view_proto.meta.HasField("created_timestamp"): + feature_view.created_timestamp = ( + feature_view_proto.meta.created_timestamp.ToDatetime() + ) + if feature_view_proto.meta.HasField("last_updated_timestamp"): + feature_view.last_updated_timestamp = ( + feature_view_proto.meta.last_updated_timestamp.ToDatetime() + ) for interval in feature_view_proto.meta.materialization_intervals: feature_view.materialization_intervals.append( @@ -261,40 +467,12 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): @property def most_recent_end_time(self) -> Optional[datetime]: + """ + Retrieves the latest time up to which the feature view has been materialized. + + Returns: + The latest time, or None if the feature view has not been materialized. 
+ """ if len(self.materialization_intervals) == 0: return None return max([interval[1] for interval in self.materialization_intervals]) - - def infer_features_from_input_source(self, config: RepoConfig): - if not self.features: - columns_to_exclude = { - self.input.event_timestamp_column, - self.input.created_timestamp_column, - } | set(self.entities) - - for col_name, col_datatype in self.input.get_table_column_names_and_types( - config - ): - if col_name not in columns_to_exclude and not re.match( - "^__|__$", - col_name, # double underscores often signal an internal-use column - ): - feature_name = ( - self.input.field_mapping[col_name] - if col_name in self.input.field_mapping.keys() - else col_name - ) - self.features.append( - Feature( - feature_name, - self.input.source_datatype_to_feast_value_type()( - col_datatype - ), - ) - ) - - if not self.features: - raise RegistryInferenceFailure( - "FeatureView", - f"Could not infer Features for the FeatureView named {self.name}.", - ) diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py index 15b24889da..a8e0e8cfe5 100644 --- a/sdk/python/feast/feature_view_projection.py +++ b/sdk/python/feast/feature_view_projection.py @@ -1,21 +1,43 @@ -from typing import List +from typing import TYPE_CHECKING, Dict, List, Optional from attr import dataclass -from feast.feature import Feature +from feast.field import Field from feast.protos.feast.core.FeatureViewProjection_pb2 import ( FeatureViewProjection as FeatureViewProjectionProto, ) +if TYPE_CHECKING: + from feast.base_feature_view import BaseFeatureView + @dataclass class FeatureViewProjection: + """ + A feature view projection represents a selection of one or more features from a + single feature view. + + Attributes: + name: The unique name of the feature view from which this projection is created. + name_alias: An optional alias for the name. + features: The list of features represented by the feature view projection. + join_key_map: A map to modify join key columns during retrieval of this feature + view projection. 
+ """ + name: str - features: List[Feature] + name_alias: Optional[str] + features: List[Field] + join_key_map: Dict[str, str] = {} - def to_proto(self): + def name_to_use(self): + return self.name_alias or self.name + + def to_proto(self) -> FeatureViewProjectionProto: feature_reference_proto = FeatureViewProjectionProto( - feature_view_name=self.name + feature_view_name=self.name, + feature_view_name_alias=self.name_alias or "", + join_key_map=self.join_key_map, ) for feature in self.features: feature_reference_proto.feature_columns.append(feature.to_proto()) @@ -24,14 +46,21 @@ def to_proto(self): @staticmethod def from_proto(proto: FeatureViewProjectionProto): - ref = FeatureViewProjection(name=proto.feature_view_name, features=[]) + feature_view_projection = FeatureViewProjection( + name=proto.feature_view_name, + name_alias=proto.feature_view_name_alias, + features=[], + join_key_map=dict(proto.join_key_map), + ) for feature_column in proto.feature_columns: - ref.features.append(Feature.from_proto(feature_column)) + feature_view_projection.features.append(Field.from_proto(feature_column)) - return ref + return feature_view_projection @staticmethod - def from_definition(feature_definition): + def from_definition(base_feature_view: "BaseFeatureView"): return FeatureViewProjection( - name=feature_definition.name, features=feature_definition.features + name=base_feature_view.name, + name_alias=None, + features=base_feature_view.features, ) diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py new file mode 100644 index 0000000000..77011e6758 --- /dev/null +++ b/sdk/python/feast/field.py @@ -0,0 +1,103 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, Optional + +from feast.feature import Feature +from feast.protos.feast.core.Feature_pb2 import FeatureSpecV2 as FieldProto +from feast.types import FeastType, from_value_type +from feast.value_type import ValueType + + +class Field: + """ + A Field represents a set of values with the same structure. + + Attributes: + name: The name of the field. + dtype: The type of the field, such as string or float. + tags: User-defined metadata in dictionary form. + """ + + name: str + dtype: FeastType + tags: Dict[str, str] + + def __init__( + self, *, name: str, dtype: FeastType, tags: Optional[Dict[str, str]] = None, + ): + """ + Creates a Field object. + + Args: + name: The name of the field. + dtype: The type of the field, such as string or float. + tags (optional): User-defined metadata in dictionary form. 
+ """ + self.name = name + self.dtype = dtype + self.tags = tags or {} + + def __eq__(self, other): + if ( + self.name != other.name + or self.dtype != other.dtype + or self.tags != other.tags + ): + return False + return True + + def __hash__(self): + return hash((self.name, hash(self.dtype))) + + def __lt__(self, other): + return self.name < other.name + + def __repr__(self): + return f"{self.name}-{self.dtype}" + + def __str__(self): + return f"Field(name={self.name}, dtype={self.dtype}, tags={self.tags})" + + def to_proto(self) -> FieldProto: + """Converts a Field object to its protobuf representation.""" + value_type = self.dtype.to_value_type() + return FieldProto(name=self.name, value_type=value_type.value, tags=self.tags) + + @classmethod + def from_proto(cls, field_proto: FieldProto): + """ + Creates a Field object from a protobuf representation. + + Args: + field_proto: FieldProto protobuf object + """ + value_type = ValueType(field_proto.value_type) + return cls( + name=field_proto.name, + dtype=from_value_type(value_type=value_type), + tags=dict(field_proto.tags), + ) + + @classmethod + def from_feature(cls, feature: Feature): + """ + Creates a Field object from a Feature object. + + Args: + feature: Feature object to convert. + """ + return cls( + name=feature.name, dtype=from_value_type(feature.dtype), tags=feature.labels + ) diff --git a/sdk/python/feast/flags.py b/sdk/python/feast/flags.py new file mode 100644 index 0000000000..26e20d81f6 --- /dev/null +++ b/sdk/python/feast/flags.py @@ -0,0 +1,10 @@ +FLAG_ALPHA_FEATURES_NAME = "alpha_features" +FLAG_ON_DEMAND_TRANSFORM_NAME = "on_demand_transforms" +FLAG_AWS_LAMBDA_FEATURE_SERVER_NAME = "aws_lambda_feature_server" +ENV_FLAG_IS_TEST = "IS_TEST" + +FLAG_NAMES = { + FLAG_ALPHA_FEATURES_NAME, + FLAG_ON_DEMAND_TRANSFORM_NAME, + FLAG_AWS_LAMBDA_FEATURE_SERVER_NAME, +} diff --git a/sdk/python/feast/flags_helper.py b/sdk/python/feast/flags_helper.py new file mode 100644 index 0000000000..7cf16dbf0b --- /dev/null +++ b/sdk/python/feast/flags_helper.py @@ -0,0 +1,39 @@ +import os + +from feast import flags +from feast.repo_config import RepoConfig + + +def _env_flag_enabled(name: str) -> bool: + return os.getenv(name, default="False") == "True" + + +def feature_flag_enabled(repo_config: RepoConfig, flag_name: str) -> bool: + if is_test(): + return True + return ( + _alpha_feature_flag_enabled(repo_config) + and repo_config.flags is not None + and flag_name in repo_config.flags + and repo_config.flags[flag_name] + ) + + +def _alpha_feature_flag_enabled(repo_config: RepoConfig) -> bool: + return ( + repo_config.flags is not None + and flags.FLAG_ALPHA_FEATURES_NAME in repo_config.flags + and repo_config.flags[flags.FLAG_ALPHA_FEATURES_NAME] + ) + + +def is_test() -> bool: + return _env_flag_enabled(flags.ENV_FLAG_IS_TEST) + + +def enable_on_demand_feature_views(repo_config: RepoConfig) -> bool: + return feature_flag_enabled(repo_config, flags.FLAG_ON_DEMAND_TRANSFORM_NAME) + + +def enable_aws_lambda_feature_server(repo_config: RepoConfig) -> bool: + return feature_flag_enabled(repo_config, flags.FLAG_AWS_LAMBDA_FEATURE_SERVER_NAME) diff --git a/sdk/python/feast/grpc/auth.py b/sdk/python/feast/grpc/auth.py deleted file mode 100644 index 3deb95be24..0000000000 --- a/sdk/python/feast/grpc/auth.py +++ /dev/null @@ -1,246 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2018-2020 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -import time -from http import HTTPStatus - -import grpc -import requests -from google.auth.exceptions import DefaultCredentialsError -from google.auth.transport import requests as grequests - -from feast.config import Config -from feast.constants import AuthProvider -from feast.constants import ConfigOptions as opt - - -def get_auth_metadata_plugin(config: Config) -> grpc.AuthMetadataPlugin: - """ - Get an Authentication Metadata Plugin. This plugin is used in gRPC to - sign requests. Please see the following URL for more details - https://grpc.github.io/grpc/python/_modules/grpc.html#AuthMetadataPlugin - - New plugins can be added to this function. For the time being we only - support Google Open ID authentication. - - Returns: Returns an implementation of grpc.AuthMetadataPlugin - - Args: - config: Feast Configuration object - """ - if AuthProvider(config.get(opt.AUTH_PROVIDER)) == AuthProvider.GOOGLE: - return GoogleOpenIDAuthMetadataPlugin(config) - elif AuthProvider(config.get(opt.AUTH_PROVIDER)) == AuthProvider.OAUTH: - return OAuthMetadataPlugin(config) - else: - raise RuntimeError( - "Could not determine OAuth provider." - 'Must be set to either "google" or "oauth"' - ) - - -class OAuthMetadataPlugin(grpc.AuthMetadataPlugin): - """A `gRPC AuthMetadataPlugin`_ that inserts the credentials into each - request. - - .. _gRPC AuthMetadataPlugin: - http://www.grpc.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin - """ - - def __init__(self, config: Config): - """ - Initializes an OAuthMetadataPlugin, used to sign gRPC requests - Args: - config: Feast Configuration object - """ - super(OAuthMetadataPlugin, self).__init__() - - self._static_token = None - self._token = None - - # If provided, set a static token - if config.exists(opt.AUTH_TOKEN): - self._static_token = config.get(opt.AUTH_TOKEN) - self._refresh_token(config) - elif ( - config.exists(opt.OAUTH_GRANT_TYPE) - and config.exists(opt.OAUTH_CLIENT_ID) - and config.exists(opt.OAUTH_CLIENT_SECRET) - and config.exists(opt.OAUTH_AUDIENCE) - and config.exists(opt.OAUTH_TOKEN_REQUEST_URL) - ): - self._refresh_token(config) - else: - raise RuntimeError( - " Please ensure that the " - "necessary parameters are passed to the client - " - "oauth_grant_type, oauth_client_id, oauth_client_secret, " - "oauth_audience, oauth_token_request_url." 
- ) - - def get_signed_meta(self): - """ Creates a signed authorization metadata token.""" - return (("authorization", "Bearer {}".format(self._token)),) - - def _refresh_token(self, config: Config): - """ Refreshes OAuth token and persists it in memory """ - - # Use static token if available - if self._static_token: - self._token = self._static_token - return - - headers_token = {"content-type": "application/json"} - data_token_dict = { - "grant_type": config.get(opt.OAUTH_GRANT_TYPE), - "client_id": config.get(opt.OAUTH_CLIENT_ID), - "client_secret": config.get(opt.OAUTH_CLIENT_SECRET), - "audience": config.get(opt.OAUTH_AUDIENCE), - } - data_token = json.dumps(data_token_dict) - response_token = requests.post( - config.get(opt.OAUTH_TOKEN_REQUEST_URL), - headers=headers_token, - data=data_token, - ) - if response_token.status_code == HTTPStatus.OK: - self._token = response_token.json().get("access_token") - else: - raise RuntimeError( - f"Could not fetch OAuth token, got response : {response_token.status_code}" - ) - - def set_static_token(self, token): - """ - Define a static token to return - - Args: - token: String token - """ - self._static_token = token - - def __call__(self, context, callback): - """Passes authorization metadata into the given callback. - - Args: - context (grpc.AuthMetadataContext): The RPC context. - callback (grpc.AuthMetadataPluginCallback): The callback that will - be invoked to pass in the authorization metadata. - """ - callback(self.get_signed_meta(), None) - - -class GoogleOpenIDAuthMetadataPlugin(grpc.AuthMetadataPlugin): - """A `gRPC AuthMetadataPlugin`_ that inserts the credentials into each - request. - - .. _gRPC AuthMetadataPlugin: - http://www.grpc.io/grpc/python/grpc.html#grpc.AuthMetadataPlugin - """ - - def __init__(self, config: Config): - """ - Initializes a GoogleOpenIDAuthMetadataPlugin, used to sign gRPC requests - Args: - config: Feast Configuration object - """ - super(GoogleOpenIDAuthMetadataPlugin, self).__init__() - - self._static_token = None - self._token = None - self._token_expiry_ts = time.time() - - # If provided, set a static token - if config.exists(opt.AUTH_TOKEN): - self._static_token = config.get(opt.AUTH_TOKEN) - - self._request = RequestWithTimeout(timeout=5) - self._refresh_token() - - def get_signed_meta(self): - """ Creates a signed authorization metadata token.""" - - if time.time() > self._token_expiry_ts: - self._refresh_token() - return (("authorization", "Bearer {}".format(self._token)),) - - def _refresh_token(self): - """ Refreshes Google ID token and persists it in memory """ - - # Use static token if available - if self._static_token: - self._token = self._static_token - return - - from google.oauth2.id_token import fetch_id_token, verify_oauth2_token - - try: - self._token = fetch_id_token(self._request, audience="feast.dev") - self._token_expiry_ts = verify_oauth2_token(self._token, self._request)[ - "exp" - ] - return - except DefaultCredentialsError: - pass - - # Try to use Google Auth library to find ID Token - from google import auth as google_auth - - try: - credentials, _ = google_auth.default(["openid", "email"]) - credentials.refresh(self._request) - if hasattr(credentials, "id_token"): - self._token = credentials.id_token - self._token_expiry_ts = verify_oauth2_token(self._token, self._request)[ - "exp" - ] - return - except DefaultCredentialsError: - pass # Could not determine credentials, skip - - # Raise exception otherwise - raise RuntimeError( - "Could not determine Google ID token. 
Ensure that a service account can be found by setting" - " the GOOGLE_APPLICATION_CREDENTIALS environmental variable to its path." - ) - - def set_static_token(self, token): - """ - Define a static token to return - - Args: - token: String token - """ - self._static_token = token - - def __call__(self, context, callback): - """Passes authorization metadata into the given callback. - - Args: - context (grpc.AuthMetadataContext): The RPC context. - callback (grpc.AuthMetadataPluginCallback): The callback that will - be invoked to pass in the authorization metadata. - """ - callback(self.get_signed_meta(), None) - - -class RequestWithTimeout(grequests.Request): - def __init__(self, *args, timeout=None, **kwargs): - self._timeout = timeout - super().__init__(*args, **kwargs) - - def __call__(self, *args, **kwargs): - timeout = kwargs.pop("timeout", self._timeout) - return super().__call__(*args, timeout=timeout, **kwargs) diff --git a/sdk/python/feast/grpc/grpc.py b/sdk/python/feast/grpc/grpc.py deleted file mode 100644 index 28b21437a3..0000000000 --- a/sdk/python/feast/grpc/grpc.py +++ /dev/null @@ -1,53 +0,0 @@ -import grpc - - -def create_grpc_channel( - url: str, - enable_ssl: bool = False, - enable_auth: bool = False, - ssl_server_cert_path: str = None, - auth_metadata_plugin: grpc.AuthMetadataPlugin = None, - timeout: int = 3, -) -> grpc.Channel: - """ - Create a gRPC channel - Args: - url: gRPC URL to connect to - enable_ssl: Enable TLS/SSL, optionally provide a server side certificate - enable_auth: Enable user auth - ssl_server_cert_path: (optional) Path to certificate (used with - "enable SSL") - auth_metadata_plugin: Metadata plugin to use to sign requests, only used - with "enable auth" when SSL/TLS is enabled - timeout: Connection timeout to server - - Returns: Returns a grpc.Channel - """ - if not url: - raise ValueError("Unable to create gRPC channel. URL has not been defined.") - - if enable_ssl or url.endswith(":443"): - # User has provided a public key certificate - if ssl_server_cert_path: - with open(ssl_server_cert_path, "rb",) as f: - credentials = grpc.ssl_channel_credentials(f.read()) - # Guess the certificate location - else: - credentials = grpc.ssl_channel_credentials() - - # Authentication is enabled, add the metadata plugin in order to sign - # requests - if enable_auth: - credentials = grpc.composite_channel_credentials( - credentials, grpc.metadata_call_credentials(auth_metadata_plugin), - ) - channel = grpc.secure_channel(url, credentials=credentials) - else: - channel = grpc.insecure_channel(url) - try: - grpc.channel_ready_future(channel).result(timeout=timeout) - return channel - except grpc.FutureTimeoutError: - raise ConnectionError( - f"Connection timed out while attempting to connect to {url}" - ) diff --git a/sdk/python/feast/importer.py b/sdk/python/feast/importer.py index 5dcd7c71c1..bbd592101a 100644 --- a/sdk/python/feast/importer.py +++ b/sdk/python/feast/importer.py @@ -1,28 +1,47 @@ import importlib -from feast import errors +from feast.errors import ( + FeastClassImportError, + FeastInvalidBaseClass, + FeastModuleImportError, +) -def get_class_from_type(module_name: str, class_name: str, class_type: str): - if not class_name.endswith(class_type): - raise errors.FeastClassInvalidName(class_name, class_type) +def import_class(module_name: str, class_name: str, class_type: str = None): + """ + Dynamically loads and returns a class from a module. 
- 
-    # Try importing the module that contains the custom provider
+    Args:
+        module_name: The name of the module.
+        class_name: The name of the class.
+        class_type: Optional name of a base class of the class.
+
+    Raises:
+        FeastInvalidBaseClass: If the class is not a subclass of the specified base class type.
+        FeastModuleImportError: If the module cannot be imported.
+        FeastClassImportError: If the class cannot be imported.
+    """
+    # Try importing the module.
    try:
        module = importlib.import_module(module_name)
    except Exception as e:
        # The original exception can be anything - either module not found,
        # or any other kind of error happening during the module import time.
        # So we should include the original error as well in the stack trace.
-        raise errors.FeastModuleImportError(module_name, class_type) from e
+        raise FeastModuleImportError(module_name, class_name) from e
 
-    # Try getting the provider class definition
+    # Try getting the class.
    try:
        _class = getattr(module, class_name)
    except AttributeError:
        # This can only be one type of error, when class_name attribute does not exist in the module
        # So we don't have to include the original exception here
-        raise errors.FeastClassImportError(
-            module_name, class_name, class_type=class_type
-        ) from None
+        raise FeastClassImportError(module_name, class_name) from None
+
+    # Check if the class is a subclass of the base class.
+    if class_type and not any(
+        base_class.__name__ == class_type for base_class in _class.mro()
+    ):
+        raise FeastInvalidBaseClass(class_name, class_type)
+
    return _class
diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py
index 721c34fb1a..6096095ce6 100644
--- a/sdk/python/feast/inference.py
+++ b/sdk/python/feast/inference.py
@@ -1,10 +1,11 @@
 import re
 from typing import List
 
-from feast import BigQuerySource, Entity, FileSource, RedshiftSource
-from feast.data_source import DataSource
+from feast import BigQuerySource, Entity, FileSource, RedshiftSource, SnowflakeSource
+from feast.data_source import DataSource, PushSource, RequestSource
 from feast.errors import RegistryInferenceFailure
 from feast.feature_view import FeatureView
+from feast.field import Field, from_value_type
 from feast.repo_config import RepoConfig
 from feast.value_type import ValueType
 
@@ -13,7 +14,12 @@ def update_entities_with_inferred_types_from_feature_views(
     entities: List[Entity], feature_views: List[FeatureView], config: RepoConfig
 ) -> None:
     """
-    Infer entity value type by examining schema of feature view input sources
+    Infers the types of the entities by examining the schemas of feature view batch sources.
+
+    Args:
+        entities: The entities to be updated.
+        feature_views: A list containing feature views associated with the entities.
+        config: The config for the current feature store.
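+
+    Example (illustrative): an entity with no declared value type whose join key
+        column is stored as an INT64 in a feature view's batch source will have
+        its value type inferred as ValueType.INT64.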
""" incomplete_entities = { entity.name: entity @@ -26,22 +32,25 @@ def update_entities_with_inferred_types_from_feature_views( if not (incomplete_entities_keys & set(view.entities)): continue # skip if view doesn't contain any entities that need inference - col_names_and_types = view.input.get_table_column_names_and_types(config) + col_names_and_types = list( + view.batch_source.get_table_column_names_and_types(config) + ) for entity_name in view.entities: if entity_name in incomplete_entities: - # get entity information from information extracted from the view input source + entity = incomplete_entities[entity_name] + + # get entity information from information extracted from the view batch source extracted_entity_name_type_pairs = list( - filter(lambda tup: tup[0] == entity_name, col_names_and_types) + filter(lambda tup: tup[0] == entity.join_key, col_names_and_types,) ) if len(extracted_entity_name_type_pairs) == 0: # Doesn't mention inference error because would also be an error without inferencing raise ValueError( - f"""No column in the input source for the {view.name} feature view matches + f"""No column in the batch source for the {view.name} feature view matches its entity's name.""" ) - entity = incomplete_entities[entity_name] - inferred_value_type = view.input.source_datatype_to_feast_value_type()( + inferred_value_type = view.batch_source.source_datatype_to_feast_value_type()( extracted_entity_name_type_pairs[0][1] ) @@ -62,36 +71,48 @@ def update_entities_with_inferred_types_from_feature_views( def update_data_sources_with_inferred_event_timestamp_col( data_sources: List[DataSource], config: RepoConfig ) -> None: - ERROR_MSG_PREFIX = "Unable to infer DataSource event_timestamp_column" + ERROR_MSG_PREFIX = "Unable to infer DataSource timestamp_field" for data_source in data_sources: - if ( - data_source.event_timestamp_column is None - or data_source.event_timestamp_column == "" - ): + if isinstance(data_source, RequestSource): + continue + if isinstance(data_source, PushSource): + data_source = data_source.batch_source + if data_source.timestamp_field is None or data_source.timestamp_field == "": # prepare right match pattern for data source ts_column_type_regex_pattern = "" - if isinstance(data_source, FileSource): + # TODO(adchia): Move Spark source inference out of this logic + if ( + isinstance(data_source, FileSource) + or "SparkSource" == data_source.__class__.__name__ + ): ts_column_type_regex_pattern = r"^timestamp" elif isinstance(data_source, BigQuerySource): ts_column_type_regex_pattern = "TIMESTAMP|DATETIME" elif isinstance(data_source, RedshiftSource): ts_column_type_regex_pattern = "TIMESTAMP[A-Z]*" + elif isinstance(data_source, SnowflakeSource): + ts_column_type_regex_pattern = "TIMESTAMP_[A-Z]*" else: raise RegistryInferenceFailure( "DataSource", - """ - DataSource inferencing of event_timestamp_column is currently only supported - for FileSource and BigQuerySource. + f""" + DataSource inferencing of timestamp_field is currently only supported + for FileSource, SparkSource, BigQuerySource, RedshiftSource, and SnowflakeSource. + Attempting to infer from {data_source}. 
""", ) # for informing the type checker - assert isinstance(data_source, FileSource) or isinstance( - data_source, BigQuerySource + assert ( + isinstance(data_source, FileSource) + or isinstance(data_source, BigQuerySource) + or isinstance(data_source, RedshiftSource) + or isinstance(data_source, SnowflakeSource) + or "SparkSource" == data_source.__class__.__name__ ) # loop through table columns to find singular match - event_timestamp_column, matched_flag = None, False + timestamp_field, matched_flag = None, False for ( col_name, col_datatype, @@ -106,9 +127,10 @@ def update_data_sources_with_inferred_event_timestamp_col( """, ) matched_flag = True - event_timestamp_column = col_name + timestamp_field = col_name if matched_flag: - data_source.event_timestamp_column = event_timestamp_column + assert timestamp_field + data_source.timestamp_field = timestamp_field else: raise RegistryInferenceFailure( "DataSource", @@ -116,3 +138,74 @@ def update_data_sources_with_inferred_event_timestamp_col( {ERROR_MSG_PREFIX} due to an absence of columns that satisfy the criteria. """, ) + + +def update_feature_views_with_inferred_features( + fvs: List[FeatureView], entities: List[Entity], config: RepoConfig +) -> None: + """ + Infers the set of features associated to each FeatureView and updates the FeatureView with those features. + Inference occurs through considering each column of the underlying data source as a feature except columns that are + associated with the data source's timestamp columns and the FeatureView's entity columns. + + Args: + fvs: The feature views to be updated. + entities: A list containing entities associated with the feature views. + config: The config for the current feature store. + """ + entity_name_to_join_key_map = {entity.name: entity.join_key for entity in entities} + + for fv in fvs: + if not fv.features: + columns_to_exclude = { + fv.batch_source.timestamp_field, + fv.batch_source.created_timestamp_column, + } | { + entity_name_to_join_key_map[entity_name] for entity_name in fv.entities + } + + if fv.batch_source.timestamp_field in fv.batch_source.field_mapping: + columns_to_exclude.add( + fv.batch_source.field_mapping[fv.batch_source.timestamp_field] + ) + if ( + fv.batch_source.created_timestamp_column + in fv.batch_source.field_mapping + ): + columns_to_exclude.add( + fv.batch_source.field_mapping[ + fv.batch_source.created_timestamp_column + ] + ) + + for ( + col_name, + col_datatype, + ) in fv.batch_source.get_table_column_names_and_types(config): + if col_name not in columns_to_exclude and not re.match( + "^__|__$", + col_name, # double underscores often signal an internal-use column + ): + feature_name = ( + fv.batch_source.field_mapping[col_name] + if col_name in fv.batch_source.field_mapping + else col_name + ) + field = Field( + name=feature_name, + dtype=from_value_type( + fv.batch_source.source_datatype_to_feast_value_type()( + col_datatype + ) + ), + ) + # Note that schema and features are two different attributes of a + # FeatureView, and that features should be present in both. 
+ fv.schema.append(field) + fv.features.append(field) + + if not fv.features: + raise RegistryInferenceFailure( + "FeatureView", + f"Could not infer Features for the FeatureView named {fv.name}.", + ) diff --git a/sdk/python/feast/infra/aws.py b/sdk/python/feast/infra/aws.py index f182bbbcee..b7cc61de0e 100644 --- a/sdk/python/feast/infra/aws.py +++ b/sdk/python/feast/infra/aws.py @@ -1,143 +1,413 @@ +import base64 +import hashlib +import logging +import os +import uuid from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from pathlib import Path +from tempfile import TemporaryFile +from typing import Optional, Sequence +from urllib.parse import urlparse -import pandas -from tqdm import tqdm +from colorama import Fore, Style -from feast import FeatureTable +from feast.constants import ( + AWS_LAMBDA_FEATURE_SERVER_IMAGE, + AWS_LAMBDA_FEATURE_SERVER_REPOSITORY, + DOCKER_IMAGE_TAG_ENV_NAME, + FEAST_USAGE, + FEATURE_STORE_YAML_ENV_NAME, +) from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.infra.offline_stores.helpers import get_offline_store_from_config -from feast.infra.online_stores.helpers import get_online_store_from_config -from feast.infra.provider import ( - Provider, - RetrievalJob, - _convert_arrow_to_proto, - _get_column_names, - _run_field_mapping, +from feast.errors import ( + AwsAPIGatewayDoesNotExist, + AwsLambdaDoesNotExist, + ExperimentalFeatureNotEnabled, + IncompatibleRegistryStoreClass, + RepoConfigPathDoesNotExist, + S3RegistryBucketForbiddenAccess, + S3RegistryBucketNotExist, ) -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.registry import Registry -from feast.repo_config import RepoConfig +from feast.feature_view import FeatureView +from feast.flags import FLAG_AWS_LAMBDA_FEATURE_SERVER_NAME +from feast.flags_helper import enable_aws_lambda_feature_server +from feast.infra.feature_servers.aws_lambda.config import AwsLambdaFeatureServerConfig +from feast.infra.passthrough_provider import PassthroughProvider +from feast.infra.utils import aws_utils +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry import get_registry_store_class_from_scheme +from feast.registry_store import RegistryStore +from feast.repo_config import RegistryConfig +from feast.usage import log_exceptions_and_usage +from feast.version import get_version + +try: + import boto3 +except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + raise FeastExtrasDependencyImportError("aws", str(e)) -class AwsProvider(Provider): - def __init__(self, config: RepoConfig): - self.repo_config = config - self.offline_store = get_offline_store_from_config(config.offline_store) - self.online_store = get_online_store_from_config(config.online_store) +_logger = logging.getLogger(__name__) + +class AwsProvider(PassthroughProvider): + @log_exceptions_and_usage(provider="AwsProvider") def update_infra( self, project: str, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, ): - self.online_store.update( - config=self.repo_config, - tables_to_delete=tables_to_delete, - tables_to_keep=tables_to_keep, - 
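The feature-inference rule in update_feature_views_with_inferred_features reduces to set arithmetic over the source schema. A standalone sketch with invented column names, using plain strings where Feast builds Field objects:

import re

# Invented batch source schema for a feature view.
columns = ["driver_id", "event_ts", "created_ts", "conv_rate", "acc_rate", "__log_id"]

# Entity join keys and the source's timestamp columns never become features.
columns_to_exclude = {"driver_id", "event_ts", "created_ts"}

# Names with leading double underscores are treated as internal, matching
# the re.match("^__|__$", col_name) check above.
features = [
    col
    for col in columns
    if col not in columns_to_exclude and not re.match("^__|__$", col)
]
assert features == ["conv_rate", "acc_rate"]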
entities_to_keep=entities_to_keep, - entities_to_delete=entities_to_delete, - partial=partial, - ) + # Call update only if there is an online store + if self.online_store: + self.online_store.update( + config=self.repo_config, + tables_to_delete=tables_to_delete, + tables_to_keep=tables_to_keep, + entities_to_keep=entities_to_keep, + entities_to_delete=entities_to_delete, + partial=partial, + ) + + if self.repo_config.feature_server and self.repo_config.feature_server.enabled: + if not enable_aws_lambda_feature_server(self.repo_config): + raise ExperimentalFeatureNotEnabled(FLAG_AWS_LAMBDA_FEATURE_SERVER_NAME) + + # Since the AWS Lambda feature server will attempt to load the registry, we + # only allow the registry to be in S3. + registry_path = ( + self.repo_config.registry + if isinstance(self.repo_config.registry, str) + else self.repo_config.registry.path + ) + registry_store_class = get_registry_store_class_from_scheme(registry_path) + if registry_store_class != S3RegistryStore: + raise IncompatibleRegistryStoreClass( + registry_store_class.__name__, S3RegistryStore.__name__ + ) + + ecr_client = boto3.client("ecr") + docker_image_version = _get_docker_image_version() + repository_uri = self._create_or_get_repository_uri(ecr_client) + # Only download & upload the docker image if it doesn't already exist in ECR + if not ecr_client.batch_get_image( + repositoryName=AWS_LAMBDA_FEATURE_SERVER_REPOSITORY, + imageIds=[{"imageTag": docker_image_version}], + ).get("images"): + image_uri = self._upload_docker_image( + ecr_client, repository_uri, docker_image_version + ) + else: + image_uri = f"{repository_uri}:{docker_image_version}" + + self._deploy_feature_server(project, image_uri) + + def _deploy_feature_server(self, project: str, image_uri: str): + _logger.info("Deploying feature server...") + + if not self.repo_config.repo_path: + raise RepoConfigPathDoesNotExist() + with open(self.repo_config.repo_path / "feature_store.yaml", "rb") as f: + config_bytes = f.read() + config_base64 = base64.b64encode(config_bytes).decode() + + resource_name = _get_lambda_name(project) + lambda_client = boto3.client("lambda") + api_gateway_client = boto3.client("apigatewayv2") + function = aws_utils.get_lambda_function(lambda_client, resource_name) + _logger.debug("Using function name: %s", resource_name) + _logger.debug("Found function: %s", function) + + if function is None: + # If the Lambda function does not exist, create it. + _logger.info(" Creating AWS Lambda...") + assert isinstance( + self.repo_config.feature_server, AwsLambdaFeatureServerConfig + ) + lambda_client.create_function( + FunctionName=resource_name, + Role=self.repo_config.feature_server.execution_role_name, + Code={"ImageUri": image_uri}, + PackageType="Image", + MemorySize=1769, + Environment={ + "Variables": { + FEATURE_STORE_YAML_ENV_NAME: config_base64, + FEAST_USAGE: "False", + } + }, + Tags={ + "feast-owned": "True", + "project": project, + "feast-sdk-version": get_version(), + }, + ) + function = aws_utils.get_lambda_function(lambda_client, resource_name) + if not function: + raise AwsLambdaDoesNotExist(resource_name) + else: + # If the feature_store.yaml has changed, need to update the environment variable. + env = function.get("Environment", {}).get("Variables", {}) + if env.get(FEATURE_STORE_YAML_ENV_NAME) != config_base64: + # Note, that this does not update Lambda gracefully (e.g. no rolling deployment). + # It's expected that feature_store.yaml is not regularly updated while the lambda + # is serving production traffic. 
However, the update in registry (e.g. modifying + # feature views, feature services, and other definitions does not update lambda). + _logger.info(" Updating AWS Lambda...") + + aws_utils.update_lambda_function_environment( + lambda_client, + resource_name, + {"Variables": {FEATURE_STORE_YAML_ENV_NAME: config_base64}}, + ) + + api = aws_utils.get_first_api_gateway(api_gateway_client, resource_name) + if not api: + # If the API Gateway doesn't exist, create it + _logger.info(" Creating AWS API Gateway...") + api = api_gateway_client.create_api( + Name=resource_name, + ProtocolType="HTTP", + Target=function["FunctionArn"], + RouteKey="POST /get-online-features", + Tags={ + "feast-owned": "True", + "project": project, + "feast-sdk-version": get_version(), + }, + ) + if not api: + raise AwsAPIGatewayDoesNotExist(resource_name) + # Make sure to give AWS Lambda a permission to be invoked by the newly created API Gateway + api_id = api["ApiId"] + region = lambda_client.meta.region_name + account_id = aws_utils.get_account_id() + lambda_client.add_permission( + FunctionName=function["FunctionArn"], + StatementId=str(uuid.uuid4()), + Action="lambda:InvokeFunction", + Principal="apigateway.amazonaws.com", + SourceArn=f"arn:aws:execute-api:{region}:{account_id}:{api_id}/*/*/get-online-features", + ) + @log_exceptions_and_usage(provider="AwsProvider") def teardown_infra( - self, - project: str, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], + self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], ) -> None: - self.online_store.teardown(self.repo_config, tables, entities) + if self.online_store: + self.online_store.teardown(self.repo_config, tables, entities) - def online_write_batch( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - self.online_store.online_write_batch(config, table, data, progress) + if ( + self.repo_config.feature_server is not None + and self.repo_config.feature_server.enabled + ): + _logger.info("Tearing down feature server...") + resource_name = _get_lambda_name(project) + lambda_client = boto3.client("lambda") + api_gateway_client = boto3.client("apigatewayv2") - def online_read( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - result = self.online_store.online_read(config, table, entity_keys) + function = aws_utils.get_lambda_function(lambda_client, resource_name) - return result + if function is not None: + _logger.info(" Tearing down AWS Lambda...") + aws_utils.delete_lambda_function(lambda_client, resource_name) - def materialize_single_feature_view( - self, - config: RepoConfig, - feature_view: FeatureView, - start_date: datetime, - end_date: datetime, - registry: Registry, - project: str, - tqdm_builder: Callable[[int], tqdm], - ) -> None: - entities = [] - for entity_name in feature_view.entities: - entities.append(registry.get_entity(entity_name, project)) - - ( - join_key_columns, - feature_name_columns, - event_timestamp_column, - created_timestamp_column, - ) = _get_column_names(feature_view, entities) - - offline_job = self.offline_store.pull_latest_from_table_or_query( - config=config, - data_source=feature_view.input, - join_key_columns=join_key_columns, - 
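An easy-to-miss detail of the deployment path above: the whole feature_store.yaml is handed to the Lambda as a base64-encoded environment variable, and the app.py added later in this diff reverses the encoding at cold start. A round-trip sketch with invented YAML content:

import base64

# Deploy time (provider side): encode the repo config for the Lambda environment.
config_bytes = b"project: my_project\nprovider: aws\n"  # invented feature_store.yaml
env_value = base64.b64encode(config_bytes).decode()

# Cold start (Lambda side): decode and write it back out as feature_store.yaml.
restored = base64.b64decode(env_value)
assert restored == config_bytes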
feature_name_columns=feature_name_columns, - event_timestamp_column=event_timestamp_column, - created_timestamp_column=created_timestamp_column, - start_date=start_date, - end_date=end_date, + api = aws_utils.get_first_api_gateway(api_gateway_client, resource_name) + if api is not None: + _logger.info(" Tearing down AWS API Gateway...") + aws_utils.delete_api_gateway(api_gateway_client, api["ApiId"]) + + @log_exceptions_and_usage(provider="AwsProvider") + def get_feature_server_endpoint(self) -> Optional[str]: + project = self.repo_config.project + resource_name = _get_lambda_name(project) + api_gateway_client = boto3.client("apigatewayv2") + api = aws_utils.get_first_api_gateway(api_gateway_client, resource_name) + + if not api: + return None + + api_id = api["ApiId"] + lambda_client = boto3.client("lambda") + region = lambda_client.meta.region_name + return f"https://{api_id}.execute-api.{region}.amazonaws.com" + + def _upload_docker_image( + self, ecr_client, repository_uri: str, docker_image_version: str + ) -> str: + """ + Pulls the AWS Lambda docker image from Dockerhub and uploads it to AWS ECR. + + Returns: + The URI of the uploaded docker image. + """ + try: + import docker + from docker.errors import APIError + except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("docker", str(e)) + + try: + docker_client = docker.from_env() + except APIError: + from feast.errors import DockerDaemonNotRunning + + raise DockerDaemonNotRunning() + + dockerhub_image = f"{AWS_LAMBDA_FEATURE_SERVER_IMAGE}:{docker_image_version}" + _logger.info( + f"Pulling remote image {Style.BRIGHT + Fore.GREEN}{dockerhub_image}{Style.RESET_ALL}" ) + for line in docker_client.api.pull(dockerhub_image, stream=True, decode=True): + _logger.debug(f" {line}") + + auth_token = ecr_client.get_authorization_token()["authorizationData"][0][ + "authorizationToken" + ] + username, password = base64.b64decode(auth_token).decode("utf-8").split(":") - table = offline_job.to_arrow() + ecr_address = repository_uri.split("/")[0] + _logger.info( + f"Logging in Docker client to {Style.BRIGHT + Fore.GREEN}{ecr_address}{Style.RESET_ALL}" + ) + login_status = docker_client.login( + username=username, password=password, registry=ecr_address + ) + _logger.debug(f" {login_status}") - if feature_view.input.field_mapping is not None: - table = _run_field_mapping(table, feature_view.input.field_mapping) + image = docker_client.images.get(dockerhub_image) + image_remote_name = f"{repository_uri}:{docker_image_version}" + _logger.info( + f"Pushing local image to remote {Style.BRIGHT + Fore.GREEN}{image_remote_name}{Style.RESET_ALL}" + ) + image.tag(image_remote_name) + for line in docker_client.api.push( + repository_uri, tag=docker_image_version, stream=True, decode=True + ): + _logger.debug(f" {line}") - join_keys = [entity.join_key for entity in entities] - rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys) + return image_remote_name - with tqdm_builder(len(rows_to_write)) as pbar: - self.online_write_batch( - self.repo_config, feature_view, rows_to_write, lambda x: pbar.update(x) + def _create_or_get_repository_uri(self, ecr_client): + try: + return ecr_client.describe_repositories( + repositoryNames=[AWS_LAMBDA_FEATURE_SERVER_REPOSITORY] + )["repositories"][0]["repositoryUri"] + except ecr_client.exceptions.RepositoryNotFoundException: + _logger.info( + f"Creating remote ECR repository {Style.BRIGHT + 
Fore.GREEN}{AWS_LAMBDA_FEATURE_SERVER_REPOSITORY}{Style.RESET_ALL}" + ) + response = ecr_client.create_repository( + repositoryName=AWS_LAMBDA_FEATURE_SERVER_REPOSITORY ) + return response["repository"]["repositoryUri"] - def get_historical_features( - self, - config: RepoConfig, - feature_views: List[FeatureView], - feature_refs: List[str], - entity_df: Union[pandas.DataFrame, str], - registry: Registry, - project: str, - full_feature_names: bool, - ) -> RetrievalJob: - job = self.offline_store.get_historical_features( - config=config, - feature_views=feature_views, - feature_refs=feature_refs, - entity_df=entity_df, - registry=registry, - project=project, - full_feature_names=full_feature_names, + +def _get_lambda_name(project: str): + lambda_prefix = AWS_LAMBDA_FEATURE_SERVER_REPOSITORY + lambda_suffix = f"{project}-{_get_docker_image_version().replace('.', '_')}" + # An AWS Lambda function name can't be longer than 64 bytes. + # This limit is usually hit during integration tests, where the Feast version string is long. + if len(lambda_prefix) + len(lambda_suffix) >= 63: + lambda_suffix = hashlib.md5(lambda_suffix.encode()).hexdigest() + return f"{lambda_prefix}-{lambda_suffix}" + + +def _get_docker_image_version() -> str: + """Returns a version for the feature server Docker image. + + If the feast.constants.DOCKER_IMAGE_TAG_ENV_NAME environment variable is set, + we return that (mostly used for integration tests, but can be used for local testing too). + + For public Feast releases this equals the Feast SDK version modified by replacing "." with "_". + For example, Feast SDK version "0.14.1" would correspond to Docker image version "0_14_1". + + During development (when Feast is installed in editable mode) this equals the Feast SDK version + modified by removing the "dev..." suffix and replacing "." with "_". For example, Feast SDK version + "0.14.1.dev41+g1cbfa225.d20211103" would correspond to Docker image version "0_14_1". This way, + Feast SDK will use an already existing Docker image built during the previous public release. + + """ + tag = os.environ.get(DOCKER_IMAGE_TAG_ENV_NAME) + if tag is not None: + return tag + else: + version = get_version() + if "dev" in version: + version = version[: version.find("dev") - 1] + _logger.warning( + "You are trying to use the AWS Lambda feature server while Feast is in development mode. " + f"Feast will use a docker image version {version} derived from Feast SDK " + f"version {get_version()}.
If you want to update the Feast SDK version, make " + "sure to first fetch all new release tags from Github and then reinstall the library:\n" + "> git fetch --all --tags\n" + "> pip install -e sdk/python" + ) + return version + + +class S3RegistryStore(RegistryStore): + def __init__(self, registry_config: RegistryConfig, repo_path: Path): + uri = registry_config.path + self._uri = urlparse(uri) + self._bucket = self._uri.hostname + self._key = self._uri.path.lstrip("/") + + self.s3_client = boto3.resource( + "s3", endpoint_url=os.environ.get("FEAST_S3_ENDPOINT_URL") ) - return job + + @log_exceptions_and_usage(registry="s3") + def get_registry_proto(self): + file_obj = TemporaryFile() + registry_proto = RegistryProto() + try: + from botocore.exceptions import ClientError + except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("aws", str(e)) + try: + bucket = self.s3_client.Bucket(self._bucket) + self.s3_client.meta.client.head_bucket(Bucket=bucket.name) + except ClientError as e: + # If a client error is thrown, then check that it was a 404 error. + # If it was a 404 error, then the bucket does not exist. + error_code = int(e.response["Error"]["Code"]) + if error_code == 404: + raise S3RegistryBucketNotExist(self._bucket) + else: + raise S3RegistryBucketForbiddenAccess(self._bucket) from e + + try: + obj = bucket.Object(self._key) + obj.download_fileobj(file_obj) + file_obj.seek(0) + registry_proto.ParseFromString(file_obj.read()) + return registry_proto + except ClientError as e: + raise FileNotFoundError( + f"Error while trying to locate Registry at path {self._uri.geturl()}" + ) from e + + @log_exceptions_and_usage(registry="s3") + def update_registry_proto(self, registry_proto: RegistryProto): + self._write_registry(registry_proto) + + def teardown(self): + self.s3_client.Object(self._bucket, self._key).delete() + + def _write_registry(self, registry_proto: RegistryProto): + registry_proto.version_id = str(uuid.uuid4()) + registry_proto.last_updated.FromDatetime(datetime.utcnow()) + # we have already checked the bucket exists so no need to do it again + file_obj = TemporaryFile() + file_obj.write(registry_proto.SerializeToString()) + file_obj.seek(0) + self.s3_client.Bucket(self._bucket).put_object(Body=file_obj, Key=self._key) diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile b/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile new file mode 100644 index 0000000000..0c342a77ce --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile @@ -0,0 +1,19 @@ +FROM public.ecr.aws/lambda/python:3.9 + +# Copy app handler code +COPY sdk/python/feast/infra/feature_servers/aws_lambda/app.py ${LAMBDA_TASK_ROOT} + +# Copy necessary parts of the Feast codebase +COPY sdk/python sdk/python +COPY protos protos +COPY go go +COPY README.md README.md + +# Install Feast for AWS with Lambda dependencies +# TODO(felixwang9817): Remove Snowflake dependencies once lazy loading of offline stores is supported. +# See https://github.com/feast-dev/feast/issues/2566 for more details. 
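For reviewers unfamiliar with the boto3 resource API that S3RegistryStore uses, its read path reduces to a short sketch; the bucket and key below are invented, and the error handling is trimmed:

import os
from tempfile import TemporaryFile
from urllib.parse import urlparse

import boto3

from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto

# Invented registry path; in practice this comes from feature_store.yaml.
uri = urlparse("s3://my-feast-bucket/registry.pb")

s3 = boto3.resource("s3", endpoint_url=os.environ.get("FEAST_S3_ENDPOINT_URL"))

# Download the serialized registry into a temp file, then parse the proto.
file_obj = TemporaryFile()
s3.Bucket(uri.hostname).Object(uri.path.lstrip("/")).download_fileobj(file_obj)
file_obj.seek(0)

registry_proto = RegistryProto()
registry_proto.ParseFromString(file_obj.read())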
+RUN pip3 install -e 'sdk/python[aws,redis,snowflake]' +RUN pip3 install -r sdk/python/feast/infra/feature_servers/aws_lambda/requirements.txt --target "${LAMBDA_TASK_ROOT}" + +# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) +CMD [ "app.handler" ] diff --git a/sdk/python/feast/staging/__init__.py b/sdk/python/feast/infra/feature_servers/aws_lambda/__init__.py similarity index 100% rename from sdk/python/feast/staging/__init__.py rename to sdk/python/feast/infra/feature_servers/aws_lambda/__init__.py diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/app.py b/sdk/python/feast/infra/feature_servers/aws_lambda/app.py new file mode 100644 index 0000000000..e90364ed68 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/app.py @@ -0,0 +1,27 @@ +import base64 +import os +import tempfile +from pathlib import Path + +from mangum import Mangum + +from feast import FeatureStore +from feast.constants import FEATURE_STORE_YAML_ENV_NAME +from feast.feature_server import get_app + +# Load RepoConfig +config_base64 = os.environ[FEATURE_STORE_YAML_ENV_NAME] +config_bytes = base64.b64decode(config_base64) + +# Create a new unique directory for writing feature_store.yaml +repo_path = Path(tempfile.mkdtemp()) + +with open(repo_path / "feature_store.yaml", "wb") as f: + f.write(config_bytes) + +# Initialize the feature store +store = FeatureStore(repo_path=str(repo_path.resolve())) + +# Create the FastAPI app and AWS Lambda handler +app = get_app(store) +handler = Mangum(app) diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/config.py b/sdk/python/feast/infra/feature_servers/aws_lambda/config.py new file mode 100644 index 0000000000..d026415ec3 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/config.py @@ -0,0 +1,23 @@ +from pydantic import StrictBool, StrictStr +from pydantic.typing import Literal + +from feast.repo_config import FeastConfigBaseModel + + +class AwsLambdaFeatureServerConfig(FeastConfigBaseModel): + """Feature server config for AWS Lambda.""" + + type: Literal["aws_lambda"] = "aws_lambda" + """Feature server type selector.""" + + enabled: StrictBool = False + """Whether the feature server should be launched.""" + + public: StrictBool = True + """Whether the endpoint should be publicly accessible.""" + + auth: Literal["none", "api-key"] = "none" + """Authentication method for the endpoint.""" + + execution_role_name: StrictStr + """The execution role for the AWS Lambda function.""" diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/requirements.txt b/sdk/python/feast/infra/feature_servers/aws_lambda/requirements.txt new file mode 100644 index 0000000000..845aa14802 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/requirements.txt @@ -0,0 +1,2 @@ +fastapi +mangum diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile new file mode 100644 index 0000000000..b4e1e4adb9 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile @@ -0,0 +1,28 @@ +FROM python:3.9-slim + +# Allow statements and log messages to immediately appear in the Knative logs +ENV PYTHONUNBUFFERED True + +# Copy local code to the container image. 
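Once deployed, the API Gateway created earlier routes POST /get-online-features to this handler. A hypothetical client call against the URL returned by get_feature_server_endpoint(); the endpoint, feature reference, and entity values are all invented, and the exact payload schema should be checked against the feature server docs:

import requests

endpoint = "https://abc123.execute-api.us-west-2.amazonaws.com"  # invented

payload = {
    "features": ["driver_hourly_stats:conv_rate"],  # invented feature reference
    "entities": {"driver_id": [1001, 1002]},  # invented entity rows
}
resp = requests.post(f"{endpoint}/get-online-features", json=payload)
print(resp.json())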
+ENV APP_HOME /app +WORKDIR $APP_HOME + +# Copy app handler code +COPY sdk/python/feast/infra/feature_servers/gcp_cloudrun/app.py ./app.py + +# Copy necessary parts of the Feast codebase +COPY sdk/python ./sdk/python +COPY protos ./protos +COPY README.md ./README.md + +# Install production dependencies. +RUN pip install --no-cache-dir \ + -e 'sdk/python[gcp,redis]' \ + -r ./sdk/python/feast/infra/feature_servers/gcp_cloudrun/requirements.txt + +# Run the web service on container startup. Here we use the gunicorn +# webserver, with one worker process and 8 threads. +# For environments with multiple CPU cores, increase the number of workers +# to be equal to the cores available. +# Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance scaling. +CMD exec gunicorn -k uvicorn.workers.UvicornWorker --bind :$PORT --workers 1 --threads 8 --timeout 0 app:app diff --git a/sdk/python/tensorflow_metadata/__init__.py b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/__init__.py similarity index 100% rename from sdk/python/tensorflow_metadata/__init__.py rename to sdk/python/feast/infra/feature_servers/gcp_cloudrun/__init__.py diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/app.py b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/app.py new file mode 100644 index 0000000000..06749b0cd3 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/app.py @@ -0,0 +1,24 @@ +import base64 +import os +import tempfile +from pathlib import Path + +from feast import FeatureStore +from feast.constants import FEATURE_STORE_YAML_ENV_NAME +from feast.feature_server import get_app + +# Load RepoConfig +config_base64 = os.environ[FEATURE_STORE_YAML_ENV_NAME] +config_bytes = base64.b64decode(config_base64) + +# Create a new unique directory for writing feature_store.yaml +repo_path = Path(tempfile.mkdtemp()) + +with open(repo_path / "feature_store.yaml", "wb") as f: + f.write(config_bytes) + +# Initialize the feature store +store = FeatureStore(repo_path=str(repo_path.resolve())) + +# Create the FastAPI app +app = get_app(store) diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py new file mode 100644 index 0000000000..728ac56ae2 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/config.py @@ -0,0 +1,20 @@ +from pydantic import StrictBool +from pydantic.typing import Literal + +from feast.repo_config import FeastConfigBaseModel + + +class GcpCloudRunFeatureServerConfig(FeastConfigBaseModel): + """Feature server config for GCP CloudRun.""" + + type: Literal["gcp_cloudrun"] = "gcp_cloudrun" + """Feature server type selector.""" + + enabled: StrictBool = False + """Whether the feature server should be launched.""" + + public: StrictBool = True + """Whether the endpoint should be publicly accessible.""" + + auth: Literal["none", "api-key"] = "none" + """Authentication method for the endpoint.""" diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/requirements.txt b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/requirements.txt new file mode 100644 index 0000000000..8f22dccf99 --- /dev/null +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/requirements.txt @@ -0,0 +1 @@ +gunicorn diff --git a/sdk/python/feast/infra/gcp.py b/sdk/python/feast/infra/gcp.py index 2662a6e54f..257ae38d02 100644 --- a/sdk/python/feast/infra/gcp.py +++ b/sdk/python/feast/infra/gcp.py @@ -1,145 +1,84 @@ +import uuid from datetime import 
datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - -import pandas -from tqdm import tqdm - -from feast import FeatureTable -from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.infra.offline_stores.helpers import get_offline_store_from_config -from feast.infra.online_stores.helpers import get_online_store_from_config -from feast.infra.provider import ( - Provider, - RetrievalJob, - _convert_arrow_to_proto, - _get_column_names, - _run_field_mapping, -) -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.registry import Registry -from feast.repo_config import RepoConfig - - -class GcpProvider(Provider): - _gcp_project_id: Optional[str] - _namespace: Optional[str] - - def __init__(self, config: RepoConfig): - self.repo_config = config - self.offline_store = get_offline_store_from_config(config.offline_store) - self.online_store = get_online_store_from_config(config.online_store) - - def update_infra( - self, - project: str, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - self.online_store.update( - config=self.repo_config, - tables_to_delete=tables_to_delete, - tables_to_keep=tables_to_keep, - entities_to_keep=entities_to_keep, - entities_to_delete=entities_to_delete, - partial=partial, - ) - - def teardown_infra( - self, - project: str, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], - ) -> None: - self.online_store.teardown(self.repo_config, tables, entities) - - def online_write_batch( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - self.online_store.online_write_batch(config, table, data, progress) - - def online_read( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - result = self.online_store.online_read(config, table, entity_keys) - - return result - - def materialize_single_feature_view( - self, - config: RepoConfig, - feature_view: FeatureView, - start_date: datetime, - end_date: datetime, - registry: Registry, - project: str, - tqdm_builder: Callable[[int], tqdm], - ) -> None: - entities = [] - for entity_name in feature_view.entities: - entities.append(registry.get_entity(entity_name, project)) - - ( - join_key_columns, - feature_name_columns, - event_timestamp_column, - created_timestamp_column, - ) = _get_column_names(feature_view, entities) - - offline_job = self.offline_store.pull_latest_from_table_or_query( - config=config, - data_source=feature_view.input, - join_key_columns=join_key_columns, - feature_name_columns=feature_name_columns, - event_timestamp_column=event_timestamp_column, - created_timestamp_column=created_timestamp_column, - start_date=start_date, - end_date=end_date, - ) - table = offline_job.to_arrow() - - if feature_view.input.field_mapping is not None: - table = _run_field_mapping(table, feature_view.input.field_mapping) - - join_keys = [entity.join_key for entity in entities] - rows_to_write = 
_convert_arrow_to_proto(table, feature_view, join_keys) - - with tqdm_builder(len(rows_to_write)) as pbar: - self.online_write_batch( - self.repo_config, feature_view, rows_to_write, lambda x: pbar.update(x) +from pathlib import Path +from tempfile import TemporaryFile +from urllib.parse import urlparse + +from feast.infra.passthrough_provider import PassthroughProvider +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry_store import RegistryStore +from feast.repo_config import RegistryConfig +from feast.usage import log_exceptions_and_usage + + +class GcpProvider(PassthroughProvider): + """ + This class only exists for backwards compatibility. + """ + + pass + + +class GCSRegistryStore(RegistryStore): + def __init__(self, registry_config: RegistryConfig, repo_path: Path): + uri = registry_config.path + try: + import google.cloud.storage as storage + except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("gcp", str(e)) + + self.gcs_client = storage.Client() + self._uri = urlparse(uri) + self._bucket = self._uri.hostname + self._blob = self._uri.path.lstrip("/") + + @log_exceptions_and_usage(registry="gs") + def get_registry_proto(self): + import google.cloud.storage as storage + from google.cloud.exceptions import NotFound + + file_obj = TemporaryFile() + registry_proto = RegistryProto() + try: + bucket = self.gcs_client.get_bucket(self._bucket) + except NotFound: + raise Exception( + f"No bucket named {self._bucket} exists; please create it first." ) - - def get_historical_features( - self, - config: RepoConfig, - feature_views: List[FeatureView], - feature_refs: List[str], - entity_df: Union[pandas.DataFrame, str], - registry: Registry, - project: str, - full_feature_names: bool, - ) -> RetrievalJob: - job = self.offline_store.get_historical_features( - config=config, - feature_views=feature_views, - feature_refs=feature_refs, - entity_df=entity_df, - registry=registry, - project=project, - full_feature_names=full_feature_names, + if storage.Blob(bucket=bucket, name=self._blob).exists(self.gcs_client): + self.gcs_client.download_blob_to_file( + self._uri.geturl(), file_obj, timeout=30 + ) + file_obj.seek(0) + registry_proto.ParseFromString(file_obj.read()) + return registry_proto + raise FileNotFoundError( + f'Registry not found at path "{self._uri.geturl()}". Have you run "feast apply"?' ) - return job + + @log_exceptions_and_usage(registry="gs") + def update_registry_proto(self, registry_proto: RegistryProto): + self._write_registry(registry_proto) + + def teardown(self): + from google.cloud.exceptions import NotFound + + gs_bucket = self.gcs_client.get_bucket(self._bucket) + try: + gs_bucket.delete_blob(self._blob) + except NotFound: + # If the blob deletion fails with NotFound, it has already been deleted. 
+ pass + + def _write_registry(self, registry_proto: RegistryProto): + registry_proto.version_id = str(uuid.uuid4()) + registry_proto.last_updated.FromDatetime(datetime.utcnow()) + # we have already checked the bucket exists so no need to do it again + gs_bucket = self.gcs_client.get_bucket(self._bucket) + blob = gs_bucket.blob(self._blob) + file_obj = TemporaryFile() + file_obj.write(registry_proto.SerializeToString()) + file_obj.seek(0) + blob.upload_from_file(file_obj) diff --git a/sdk/python/feast/infra/infra_object.py b/sdk/python/feast/infra/infra_object.py new file mode 100644 index 0000000000..91770e64e5 --- /dev/null +++ b/sdk/python/feast/infra/infra_object.py @@ -0,0 +1,160 @@ +# Copyright 2021 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from typing import Any, List + +from feast.errors import FeastInvalidInfraObjectType +from feast.importer import import_class +from feast.protos.feast.core.DatastoreTable_pb2 import ( + DatastoreTable as DatastoreTableProto, +) +from feast.protos.feast.core.DynamoDBTable_pb2 import ( + DynamoDBTable as DynamoDBTableProto, +) +from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto +from feast.protos.feast.core.InfraObject_pb2 import InfraObject as InfraObjectProto +from feast.protos.feast.core.SqliteTable_pb2 import SqliteTable as SqliteTableProto + +DATASTORE_INFRA_OBJECT_CLASS_TYPE = "feast.infra.online_stores.datastore.DatastoreTable" +DYNAMODB_INFRA_OBJECT_CLASS_TYPE = "feast.infra.online_stores.dynamodb.DynamoDBTable" +SQLITE_INFRA_OBJECT_CLASS_TYPE = "feast.infra.online_stores.sqlite.SqliteTable" + + +class InfraObject(ABC): + """ + Represents a single infrastructure object (e.g. online store table) managed by Feast. + """ + + @abstractmethod + def __init__(self, name: str): + self._name = name + + @property + def name(self) -> str: + return self._name + + @abstractmethod + def to_infra_object_proto(self) -> InfraObjectProto: + """Converts an InfraObject to its protobuf representation, wrapped in an InfraObjectProto.""" + pass + + @abstractmethod + def to_proto(self) -> Any: + """Converts an InfraObject to its protobuf representation.""" + pass + + def __lt__(self, other) -> bool: + return self.name < other.name + + @staticmethod + @abstractmethod + def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: + """ + Returns an InfraObject created from a protobuf representation. + + Args: + infra_object_proto: A protobuf representation of an InfraObject. + + Raises: + FeastInvalidInfraObjectType: The type of InfraObject could not be identified. 
+ """ + if infra_object_proto.infra_object_class_type: + cls = _get_infra_object_class_from_type( + infra_object_proto.infra_object_class_type + ) + return cls.from_infra_object_proto(infra_object_proto) + + raise FeastInvalidInfraObjectType() + + @staticmethod + def from_proto(infra_object_proto: Any) -> Any: + """ + Converts a protobuf representation of a subclass to an object of that subclass. + + Args: + infra_object_proto: A protobuf representation of an InfraObject. + + Raises: + FeastInvalidInfraObjectType: The type of InfraObject could not be identified. + """ + if isinstance(infra_object_proto, DatastoreTableProto): + infra_object_class_type = DATASTORE_INFRA_OBJECT_CLASS_TYPE + elif isinstance(infra_object_proto, DynamoDBTableProto): + infra_object_class_type = DYNAMODB_INFRA_OBJECT_CLASS_TYPE + elif isinstance(infra_object_proto, SqliteTableProto): + infra_object_class_type = SQLITE_INFRA_OBJECT_CLASS_TYPE + else: + raise FeastInvalidInfraObjectType() + + cls = _get_infra_object_class_from_type(infra_object_class_type) + return cls.from_proto(infra_object_proto) + + @abstractmethod + def update(self): + """ + Deploys or updates the infrastructure object. + """ + pass + + @abstractmethod + def teardown(self): + """ + Tears down the infrastructure object. + """ + pass + + +@dataclass +class Infra: + """ + Represents the set of infrastructure managed by Feast. + + Args: + infra_objects: A list of InfraObjects, each representing one infrastructure object. + """ + + infra_objects: List[InfraObject] = field(default_factory=list) + + def to_proto(self) -> InfraProto: + """ + Converts Infra to its protobuf representation. + + Returns: + An InfraProto protobuf. + """ + infra_proto = InfraProto() + for infra_object in self.infra_objects: + infra_object_proto = infra_object.to_infra_object_proto() + infra_proto.infra_objects.append(infra_object_proto) + + return infra_proto + + @classmethod + def from_proto(cls, infra_proto: InfraProto): + """ + Returns an Infra object created from a protobuf representation. + """ + infra = cls() + infra.infra_objects += [ + InfraObject.from_infra_object_proto(infra_object_proto) + for infra_object_proto in infra_proto.infra_objects + ] + + return infra + + +def _get_infra_object_class_from_type(infra_object_class_type: str): + module_name, infra_object_class_name = infra_object_class_type.rsplit(".", 1) + return import_class(module_name, infra_object_class_name) diff --git a/sdk/python/feast/infra/key_encoding_utils.py b/sdk/python/feast/infra/key_encoding_utils.py index eb3a5ea1cc..8333610473 100644 --- a/sdk/python/feast/infra/key_encoding_utils.py +++ b/sdk/python/feast/infra/key_encoding_utils.py @@ -19,6 +19,22 @@ def _serialize_val(value_type, v: ValueProto) -> Tuple[bytes, int]: raise ValueError(f"Value type not supported for Firestore: {v}") +def serialize_entity_key_prefix(entity_keys: List[str]) -> bytes: + """ + Serialize keys to a bytestring so it can be used to prefix-scan through items stored in the online store + using serialize_entity_key. + + This encoding is a partial implementation of serialize_entity_key, only operating on the keys of entities, + and not the values. + """ + sorted_keys = sorted(entity_keys) + output: List[bytes] = [] + for k in sorted_keys: + output.append(struct.pack(" bytes: """ Serialize entity key to a bytestring so it can be used as a lookup key in a hash table. 
diff --git a/sdk/python/feast/infra/local.py b/sdk/python/feast/infra/local.py index f677c84672..7249d247a2 100644 --- a/sdk/python/feast/infra/local.py +++ b/sdk/python/feast/infra/local.py @@ -1,154 +1,67 @@ +import uuid from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - -import pandas as pd -import pytz -from tqdm import tqdm - -from feast import FeatureTable -from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.infra.offline_stores.helpers import get_offline_store_from_config -from feast.infra.online_stores.helpers import get_online_store_from_config -from feast.infra.provider import ( - Provider, - RetrievalJob, - _convert_arrow_to_proto, - _get_column_names, - _run_field_mapping, -) -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.registry import Registry -from feast.repo_config import RepoConfig - - -class LocalProvider(Provider): - def __init__(self, config: RepoConfig): - assert config is not None - self.config = config - self.offline_store = get_offline_store_from_config(config.offline_store) - self.online_store = get_online_store_from_config(config.online_store) - - def update_infra( - self, - project: str, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - self.online_store.update( - self.config, - tables_to_delete, - tables_to_keep, - entities_to_delete, - entities_to_keep, - partial, - ) - - def teardown_infra( - self, - project: str, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], - ) -> None: - self.online_store.teardown(self.config, tables, entities) - - def online_write_batch( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - self.online_store.online_write_batch(config, table, data, progress) - - def online_read( - self, - config: RepoConfig, - table: Union[FeatureTable, FeatureView], - entity_keys: List[EntityKeyProto], - requested_features: List[str] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - result = self.online_store.online_read(config, table, entity_keys) - - return result - - def materialize_single_feature_view( - self, - config: RepoConfig, - feature_view: FeatureView, - start_date: datetime, - end_date: datetime, - registry: Registry, - project: str, - tqdm_builder: Callable[[int], tqdm], - ) -> None: - entities = [] - for entity_name in feature_view.entities: - entities.append(registry.get_entity(entity_name, project)) - - ( - join_key_columns, - feature_name_columns, - event_timestamp_column, - created_timestamp_column, - ) = _get_column_names(feature_view, entities) - - offline_job = self.offline_store.pull_latest_from_table_or_query( - data_source=feature_view.input, - join_key_columns=join_key_columns, - feature_name_columns=feature_name_columns, - event_timestamp_column=event_timestamp_column, - created_timestamp_column=created_timestamp_column, - start_date=start_date, - end_date=end_date, - config=config, - ) - table = offline_job.to_arrow() - - if feature_view.input.field_mapping is not None: - table = _run_field_mapping(table, 
feature_view.input.field_mapping) - - join_keys = [entity.join_key for entity in entities] - rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys) - - with tqdm_builder(len(rows_to_write)) as pbar: - self.online_write_batch( - self.config, feature_view, rows_to_write, lambda x: pbar.update(x) +from pathlib import Path +from typing import List + +from feast.infra.infra_object import Infra, InfraObject +from feast.infra.passthrough_provider import PassthroughProvider +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry_store import RegistryStore +from feast.repo_config import RegistryConfig, RepoConfig +from feast.usage import log_exceptions_and_usage + + +class LocalProvider(PassthroughProvider): + """ + This class only exists for backwards compatibility. + """ + + def plan_infra( + self, config: RepoConfig, desired_registry_proto: RegistryProto + ) -> Infra: + infra = Infra() + if self.online_store: + infra_objects: List[InfraObject] = self.online_store.plan( + config, desired_registry_proto ) - - def get_historical_features( - self, - config: RepoConfig, - feature_views: List[FeatureView], - feature_refs: List[str], - entity_df: Union[pd.DataFrame, str], - registry: Registry, - project: str, - full_feature_names: bool, - ) -> RetrievalJob: - return self.offline_store.get_historical_features( - config=config, - feature_views=feature_views, - feature_refs=feature_refs, - entity_df=entity_df, - registry=registry, - project=project, - full_feature_names=full_feature_names, + infra.infra_objects += infra_objects + return infra + + +class LocalRegistryStore(RegistryStore): + def __init__(self, registry_config: RegistryConfig, repo_path: Path): + registry_path = Path(registry_config.path) + if registry_path.is_absolute(): + self._filepath = registry_path + else: + self._filepath = repo_path.joinpath(registry_path) + + @log_exceptions_and_usage(registry="local") + def get_registry_proto(self): + registry_proto = RegistryProto() + if self._filepath.exists(): + registry_proto.ParseFromString(self._filepath.read_bytes()) + return registry_proto + raise FileNotFoundError( + f'Registry not found at path "{self._filepath}". Have you run "feast apply"?' ) - -def _table_id(project: str, table: Union[FeatureTable, FeatureView]) -> str: - return f"{project}_{table.name}" - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) + @log_exceptions_and_usage(registry="local") + def update_registry_proto(self, registry_proto: RegistryProto): + self._write_registry(registry_proto) + + def teardown(self): + try: + self._filepath.unlink() + except FileNotFoundError: + # If the file deletion fails with FileNotFoundError, the file has already + # been deleted. 
+ pass + + def _write_registry(self, registry_proto: RegistryProto): + registry_proto.version_id = str(uuid.uuid4()) + registry_proto.last_updated.FromDatetime(datetime.utcnow()) + file_dir = self._filepath.parent + file_dir.mkdir(exist_ok=True) + with open(self._filepath, mode="wb", buffering=0) as f: + f.write(registry_proto.SerializeToString()) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 16dc8e950c..29d0e029d9 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -1,34 +1,47 @@ -import time +import contextlib import uuid -from dataclasses import asdict, dataclass from datetime import date, datetime, timedelta -from typing import List, Optional, Set, Union +from typing import ( + Callable, + ContextManager, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, +) -import pandas +import numpy as np +import pandas as pd import pyarrow -from jinja2 import BaseLoader, Environment -from pandas import Timestamp +import pyarrow.parquet from pydantic import StrictStr from pydantic.typing import Literal from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed -from feast import errors +from feast import flags_helper from feast.data_source import DataSource from feast.errors import ( BigQueryJobCancelled, BigQueryJobStillRunning, FeastProviderLoginError, + InvalidEntityType, ) -from feast.feature_view import FeatureView -from feast.infra.offline_stores.offline_store import OfflineStore, RetrievalJob -from feast.infra.provider import ( - DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, - _get_requested_feature_views_to_features_dict, +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, ) +from feast.on_demand_feature_view import OnDemandFeatureView from feast.registry import Registry from feast.repo_config import FeastConfigBaseModel, RepoConfig -from .bigquery_source import BigQuerySource +from ...saved_dataset import SavedDatasetStorage +from ...usage import log_exceptions_and_usage +from .bigquery_source import BigQuerySource, SavedDatasetBigQueryStorage try: from google.api_core.exceptions import NotFound @@ -43,7 +56,7 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): - """ Offline store config for GCP BigQuery """ + """Offline store config for GCP BigQuery""" type: Literal["bigquery"] = "bigquery" """ Offline store type selector""" @@ -54,15 +67,23 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): project_id: Optional[StrictStr] = None """ (optional) GCP project name used for the BigQuery offline store """ + location: Optional[StrictStr] = None + """ (optional) GCP location name used for the BigQuery offline store. + Examples of location names include ``US``, ``EU``, ``us-central1``, ``us-west4``. + If a location is not specified, the location defaults to the ``US`` multi-regional location. 
+ For more information on BigQuery data locations see: https://cloud.google.com/bigquery/docs/locations + """ + class BigQueryOfflineStore(OfflineStore): @staticmethod + @log_exceptions_and_usage(offline_store="bigquery") def pull_latest_from_table_or_query( config: RepoConfig, data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -75,31 +96,71 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamps = [event_timestamp_column] + timestamps = [timestamp_field] if created_timestamp_column: timestamps.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" field_string = ", ".join(join_key_columns + feature_name_columns + timestamps) - client = _get_bigquery_client(project=config.offline_store.project_id) + client = _get_bigquery_client( + project=config.offline_store.project_id, + location=config.offline_store.location, + ) query = f""" - SELECT {field_string} + SELECT + {field_string} + {f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""} FROM ( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') ) WHERE _feast_row = 1 """ - return BigQueryRetrievalJob(query=query, client=client, config=config) + + # When materializing a single feature view, we don't need full feature names. 
On demand transforms aren't materialized + return BigQueryRetrievalJob( + query=query, client=client, config=config, full_feature_names=False, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="bigquery") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, BigQuerySource) + from_expression = data_source.get_table_query_string() + + client = _get_bigquery_client( + project=config.offline_store.project_id, + location=config.offline_store.location, + ) + field_string = ", ".join( + join_key_columns + feature_name_columns + [timestamp_field] + ) + query = f""" + SELECT {field_string} + FROM {from_expression} + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') + """ + return BigQueryRetrievalJob( + query=query, client=client, config=config, full_feature_names=False, + ) @staticmethod + @log_exceptions_and_usage(offline_store="bigquery") def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], feature_refs: List[str], - entity_df: Union[pandas.DataFrame, str], + entity_df: Union[pd.DataFrame, str], registry: Registry, project: str, full_feature_names: bool = False, @@ -107,153 +168,134 @@ def get_historical_features( # TODO: Add entity_df validation in order to fail before interacting with BigQuery assert isinstance(config.offline_store, BigQueryOfflineStoreConfig) - client = _get_bigquery_client(project=config.offline_store.project_id) - expected_join_keys = _get_join_keys(project, feature_views, registry) + client = _get_bigquery_client( + project=config.offline_store.project_id, + location=config.offline_store.location, + ) assert isinstance(config.offline_store, BigQueryOfflineStoreConfig) - table = _upload_entity_df_into_bigquery( - client=client, - project=config.project, - dataset_name=config.offline_store.dataset, - dataset_project=client.project, - entity_df=entity_df, - ) - - entity_df_event_timestamp_col = _infer_event_timestamp_from_bigquery_query( - table.schema - ) - _assert_expected_columns_in_bigquery( - expected_join_keys, entity_df_event_timestamp_col, table.schema, + table_reference = _get_table_reference_for_new_entity( + client, + client.project, + config.offline_store.dataset, + config.offline_store.location, ) - # Build a query context containing all information required to template the BigQuery SQL query - query_context = get_feature_view_query_context( - feature_refs, - feature_views, - registry, - project, - full_feature_names=full_feature_names, - ) + entity_schema = _get_entity_schema(client=client, entity_df=entity_df,) - # Infer min and max timestamps from entity_df to limit data read in BigQuery SQL query - min_timestamp, max_timestamp = _get_entity_df_timestamp_bounds( - client, str(table.reference), entity_df_event_timestamp_col + entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema ) - # Generate the BigQuery SQL query from the query context - query = build_point_in_time_query( - query_context, - min_timestamp=min_timestamp, - max_timestamp=max_timestamp, - left_table_query_string=str(table.reference), - entity_df_event_timestamp_col=entity_df_event_timestamp_col, - full_feature_names=full_feature_names, + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, entity_df_event_timestamp_col, 
client, ) - job = BigQueryRetrievalJob(query=query, client=client, config=config) - return job - - -def _assert_expected_columns_in_dataframe( - join_keys: Set[str], entity_df_event_timestamp_col: str, entity_df: pandas.DataFrame -): - entity_df_columns = set(entity_df.columns.values) - expected_columns = join_keys.copy() - expected_columns.add(entity_df_event_timestamp_col) - - missing_keys = expected_columns - entity_df_columns - - if len(missing_keys) != 0: - raise errors.FeastEntityDFMissingColumnsError(expected_columns, missing_keys) - - -def _assert_expected_columns_in_bigquery( - join_keys: Set[str], entity_df_event_timestamp_col: str, table_schema -): - entity_columns = set() - for schema_field in table_schema: - entity_columns.add(schema_field.name) - - expected_columns = join_keys.copy() - expected_columns.add(entity_df_event_timestamp_col) + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + _upload_entity_df( + client=client, table_name=table_reference, entity_df=entity_df, + ) - missing_keys = expected_columns - entity_columns + expected_join_keys = offline_utils.get_expected_join_keys( + project, feature_views, registry + ) - if len(missing_keys) != 0: - raise errors.FeastEntityDFMissingColumnsError(expected_columns, missing_keys) + offline_utils.assert_expected_columns_in_entity_df( + entity_schema, expected_join_keys, entity_df_event_timestamp_col + ) + # Build a query context containing all information required to template the BigQuery SQL query + query_context = offline_utils.get_feature_view_query_context( + feature_refs, + feature_views, + registry, + project, + entity_df_event_timestamp_range, + ) -def _get_join_keys( - project: str, feature_views: List[FeatureView], registry: Registry -) -> Set[str]: - join_keys = set() - for feature_view in feature_views: - entities = feature_view.entities - for entity_name in entities: - entity = registry.get_entity(entity_name, project) - join_keys.add(entity.join_key) - return join_keys + # Generate the BigQuery SQL query from the query context + query = offline_utils.build_point_in_time_query( + query_context, + left_table_query_string=table_reference, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + try: + yield query + finally: + # Asynchronously clean up the uploaded Bigquery table, which will expire + # if cleanup fails + client.delete_table(table=table_reference, not_found_ok=True) -def _infer_event_timestamp_from_bigquery_query(table_schema) -> str: - if any( - schema_field.name == DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - for schema_field in table_schema - ): - return DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - else: - datetime_columns = list( - filter( - lambda schema_field: schema_field.field_type == "TIMESTAMP", - table_schema, - ) + return BigQueryRetrievalJob( + query=query_generator, + client=client, + config=config, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(entity_schema.keys() - {entity_df_event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), ) - if len(datetime_columns) == 1: - print( - f"Using {datetime_columns[0].name} as the event timestamp. 
To specify a column explicitly, please name it {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL}." - ) - return datetime_columns[0].name - else: - raise ValueError( - f"Please provide an entity_df with a column named {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL} representing the time of events." - ) -def _infer_event_timestamp_from_dataframe(entity_df: pandas.DataFrame) -> str: - if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in entity_df.columns: - return DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - else: - datetime_columns = entity_df.select_dtypes( - include=["datetime", "datetimetz"] - ).columns - if len(datetime_columns) == 1: - print( - f"Using {datetime_columns[0]} as the event timestamp. To specify a column explicitly, please name it {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL}." - ) - return datetime_columns[0] +class BigQueryRetrievalJob(RetrievalJob): + def __init__( + self, + query: Union[str, Callable[[], ContextManager[str]]], + client: bigquery.Client, + config: RepoConfig, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + metadata: Optional[RetrievalMetadata] = None, + ): + if not isinstance(query, str): + self._query_generator = query else: - raise ValueError( - f"Please provide an entity_df with a column named {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL} representing the time of events." - ) + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + assert isinstance(query, str) + yield query -class BigQueryRetrievalJob(RetrievalJob): - def __init__(self, query, client, config): - self.query = query + self._query_generator = query_generator self.client = client self.config = config + self._full_feature_names = full_feature_names + self._on_demand_feature_views = ( + on_demand_feature_views if on_demand_feature_views else [] + ) + self._metadata = metadata + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views - def to_df(self): - # TODO: Ideally only start this job when the user runs "get_historical_features", not when they run to_df() - df = self.client.query(self.query).to_dataframe(create_bqstorage_client=True) - return df + def _to_df_internal(self) -> pd.DataFrame: + with self._query_generator() as query: + df = self._execute_query(query).to_dataframe(create_bqstorage_client=True) + return df def to_sql(self) -> str: """ Returns the SQL query that will be executed in BigQuery to build the historical feature table. 
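A hypothetical usage sketch (``job`` here stands for any BigQueryRetrievalJob): ``print(job.to_sql())`` lets you inspect the templated point-in-time join query before running it.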
""" - return self.query + with self._query_generator() as query: + return query def to_bigquery( self, @@ -280,28 +322,58 @@ def to_bigquery( path = f"{self.client.project}.{self.config.offline_store.dataset}.historical_{today}_{rand_id}" job_config = bigquery.QueryJobConfig(destination=path) - bq_job = self.client.query(self.query, job_config=job_config) + if not job_config.dry_run and self.on_demand_feature_views: + job = self.client.load_table_from_dataframe( + self.to_df(), job_config.destination + ) + job.result() + print(f"Done writing to '{job_config.destination}'.") + return str(job_config.destination) + + with self._query_generator() as query: + self._execute_query(query, job_config, timeout) - if job_config.dry_run: + print(f"Done writing to '{job_config.destination}'.") + return str(job_config.destination) + + def _to_arrow_internal(self) -> pyarrow.Table: + with self._query_generator() as query: + q = self._execute_query(query=query) + assert q + return q.to_arrow() + + @log_exceptions_and_usage + def _execute_query( + self, query, job_config=None, timeout: int = 1800 + ) -> Optional[bigquery.job.query.QueryJob]: + bq_job = self.client.query(query, job_config=job_config) + + if job_config and job_config.dry_run: print( "This query will process {} bytes.".format(bq_job.total_bytes_processed) ) return None block_until_done(client=self.client, bq_job=bq_job, timeout=timeout) + return bq_job - print(f"Done writing to '{job_config.destination}'.") - return str(job_config.destination) + def persist(self, storage: SavedDatasetStorage): + assert isinstance(storage, SavedDatasetBigQueryStorage) - def to_arrow(self) -> pyarrow.Table: - return self.client.query(self.query).to_arrow() + self.to_bigquery( + bigquery.QueryJobConfig(destination=storage.bigquery_options.table) + ) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata def block_until_done( client: Client, bq_job: Union[bigquery.job.query.QueryJob, bigquery.job.load.LoadJob], timeout: int = 1800, - retry_cadence: int = 10, + retry_cadence: float = 1, ): """ Waits for bq_job to finish running, up to a maximum amount of time specified by the timeout parameter (defaulting to 30 minutes). @@ -317,11 +389,14 @@ def block_until_done( BigQueryJobCancelled exception to signify when that the job has been cancelled (i.e. from timeout or KeyboardInterrupt). 
""" - def _wait_until_done(job_id): - if client.get_job(job_id).state in ["PENDING", "RUNNING"]: - raise BigQueryJobStillRunning(job_id=job_id) + # For test environments, retry more aggressively + if flags_helper.is_test(): + retry_cadence = 0.1 + + def _wait_until_done(bq_job): + if client.get_job(bq_job).state in ["PENDING", "RUNNING"]: + raise BigQueryJobStillRunning(job_id=bq_job.job_id) - job_id = bq_job.job_id try: retryer = Retrying( wait=wait_fixed(retry_cadence), @@ -329,205 +404,118 @@ def _wait_until_done(job_id): retry=retry_if_exception_type(BigQueryJobStillRunning), reraise=True, ) - retryer(_wait_until_done, job_id) + retryer(_wait_until_done, bq_job) finally: - if client.get_job(job_id).state in ["PENDING", "RUNNING"]: - client.cancel_job(job_id) - raise BigQueryJobCancelled(job_id=job_id) + if client.get_job(bq_job).state in ["PENDING", "RUNNING"]: + client.cancel_job(bq_job) + raise BigQueryJobCancelled(job_id=bq_job.job_id) if bq_job.exception(): raise bq_job.exception() -@dataclass(frozen=True) -class FeatureViewQueryContext: - """Context object used to template a BigQuery point-in-time SQL query""" - - name: str - ttl: int - entities: List[str] - features: List[str] # feature reference format - table_ref: str - event_timestamp_column: str - created_timestamp_column: Optional[str] - query: str - table_subquery: str - entity_selections: List[str] - - -def _get_table_id_for_new_entity( - client: Client, project: str, dataset_name: str, dataset_project: str +def _get_table_reference_for_new_entity( + client: Client, + dataset_project: str, + dataset_name: str, + dataset_location: Optional[str], ) -> str: """Gets the table_id for the new entity to be uploaded.""" # First create the BigQuery dataset if it doesn't exist dataset = bigquery.Dataset(f"{dataset_project}.{dataset_name}") - dataset.location = "US" + dataset.location = dataset_location if dataset_location else "US" try: - client.get_dataset(dataset) + client.get_dataset(dataset.reference) except NotFound: # Only create the dataset if it does not exist client.create_dataset(dataset, exists_ok=True) - return f"{dataset_project}.{dataset_name}.entity_df_{project}_{int(time.time())}" + table_name = offline_utils.get_temp_entity_table_name() + return f"{dataset_project}.{dataset_name}.{table_name}" -def _upload_entity_df_into_bigquery( - client: Client, - project: str, - dataset_name: str, - dataset_project: str, - entity_df: Union[pandas.DataFrame, str], + +def _upload_entity_df( + client: Client, table_name: str, entity_df: Union[pd.DataFrame, str], ) -> Table: """Uploads a Pandas entity dataframe into a BigQuery table and returns the resulting table""" - table_id = _get_table_id_for_new_entity( - client, project, dataset_name, dataset_project - ) + if isinstance(entity_df, str): + job = client.query(f"CREATE TABLE {table_name} AS ({entity_df})") - if type(entity_df) is str: - job = client.query(f"CREATE TABLE {table_id} AS ({entity_df})") - block_until_done(client, job) - elif isinstance(entity_df, pandas.DataFrame): - # Drop the index so that we dont have unnecessary columns + elif isinstance(entity_df, pd.DataFrame): + # Drop the index so that we don't have unnecessary columns entity_df.reset_index(drop=True, inplace=True) - - # Upload the dataframe into BigQuery, creating a temporary table - job_config = bigquery.LoadJobConfig() - job = client.load_table_from_dataframe( - entity_df, table_id, job_config=job_config - ) - block_until_done(client, job) + job = client.load_table_from_dataframe(entity_df, table_name) 
else: - raise ValueError( - f"The entity dataframe you have provided must be a Pandas DataFrame or BigQuery SQL query, " - f"but we found: {type(entity_df)} " - ) + raise InvalidEntityType(type(entity_df)) + + block_until_done(client, job) # Ensure that the table expires after some time - table = client.get_table(table=table_id) + table = client.get_table(table=table_name) table.expires = datetime.utcnow() + timedelta(minutes=30) client.update_table(table, ["expires"]) return table -def _get_entity_df_timestamp_bounds( - client: Client, entity_df_bq_table: str, event_timestamp_col: str, -): - - boundary_df = ( - client.query( - f""" - SELECT - MIN({event_timestamp_col}) AS min_timestamp, - MAX({event_timestamp_col}) AS max_timestamp - FROM {entity_df_bq_table} - """ +def _get_entity_schema( + client: Client, entity_df: Union[pd.DataFrame, str] +) -> Dict[str, np.dtype]: + if isinstance(entity_df, str): + entity_df_sample = ( + client.query(f"SELECT * FROM ({entity_df}) LIMIT 1").result().to_dataframe() ) - .result() - .to_dataframe() - ) - min_timestamp = boundary_df.loc[0, "min_timestamp"] - max_timestamp = boundary_df.loc[0, "max_timestamp"] - return min_timestamp, max_timestamp + entity_schema = dict(zip(entity_df_sample.columns, entity_df_sample.dtypes)) + elif isinstance(entity_df, pd.DataFrame): + entity_schema = dict(zip(entity_df.columns, entity_df.dtypes)) + else: + raise InvalidEntityType(type(entity_df)) + return entity_schema -def get_feature_view_query_context( - feature_refs: List[str], - feature_views: List[FeatureView], - registry: Registry, - project: str, - full_feature_names: bool = False, -) -> List[FeatureViewQueryContext]: - """Build a query context containing all information required to template a BigQuery point-in-time SQL query""" - feature_views_to_feature_map = _get_requested_feature_views_to_features_dict( - feature_refs, feature_views - ) - - query_context = [] - for feature_view, features in feature_views_to_feature_map.items(): - join_keys = [] - entity_selections = [] - reverse_field_mapping = { - v: k for k, v in feature_view.input.field_mapping.items() - } - for entity_name in feature_view.entities: - entity = registry.get_entity(entity_name, project) - join_keys.append(entity.join_key) - join_key_column = reverse_field_mapping.get( - entity.join_key, entity.join_key +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, + client: Client, +) -> Tuple[datetime, datetime]: + if type(entity_df) is str: + job = client.query( + f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max " + f"FROM ({entity_df})" + ) + res = next(job.result()) + entity_df_event_timestamp_range = ( + res.get("min"), + res.get("max"), + ) + elif isinstance(entity_df, pd.DataFrame): + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True ) - entity_selections.append(f"{join_key_column} AS {entity.join_key}") - - if isinstance(feature_view.ttl, timedelta): - ttl_seconds = int(feature_view.ttl.total_seconds()) - else: - ttl_seconds = 0 - - assert isinstance(feature_view.input, BigQuerySource) - - event_timestamp_column = feature_view.input.event_timestamp_column - created_timestamp_column = feature_view.input.created_timestamp_column - - context = FeatureViewQueryContext( - 
name=feature_view.name, - ttl=ttl_seconds, - entities=join_keys, - features=features, - table_ref=feature_view.input.table_ref, - event_timestamp_column=reverse_field_mapping.get( - event_timestamp_column, event_timestamp_column - ), - created_timestamp_column=reverse_field_mapping.get( - created_timestamp_column, created_timestamp_column - ), - # TODO: Make created column optional and not hardcoded - query=feature_view.input.query, - table_subquery=feature_view.input.get_table_query_string(), - entity_selections=entity_selections, + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), ) - query_context.append(context) - return query_context - - -def build_point_in_time_query( - feature_view_query_contexts: List[FeatureViewQueryContext], - min_timestamp: Timestamp, - max_timestamp: Timestamp, - left_table_query_string: str, - entity_df_event_timestamp_col: str, - full_feature_names: bool = False, -): - """Build point-in-time query between each feature view table and the entity dataframe""" - template = Environment(loader=BaseLoader()).from_string( - source=SINGLE_FEATURE_VIEW_POINT_IN_TIME_JOIN - ) - - # Add additional fields to dict - template_context = { - "min_timestamp": min_timestamp, - "max_timestamp": max_timestamp, - "left_table_query_string": left_table_query_string, - "entity_df_event_timestamp_col": entity_df_event_timestamp_col, - "unique_entity_keys": set( - [entity for fv in feature_view_query_contexts for entity in fv.entities] - ), - "featureviews": [asdict(context) for context in feature_view_query_contexts], - "full_feature_names": full_feature_names, - } + else: + raise InvalidEntityType(type(entity_df)) - query = template.render(template_context) - return query + return entity_df_event_timestamp_range -def _get_bigquery_client(project: Optional[str] = None): +def _get_bigquery_client(project: Optional[str] = None, location: Optional[str] = None): try: - client = bigquery.Client(project=project) + client = bigquery.Client(project=project, location=location) except DefaultCredentialsError as e: raise FeastProviderLoginError( str(e) @@ -550,34 +538,43 @@ def _get_bigquery_client(project: Optional[str] = None): # * Precompute ROW_NUMBER() so that it doesn't have to be recomputed for every query on entity_dataframe # * Create temporary tables instead of keeping all tables in memory -SINGLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ +# Note: Keep this in sync with sdk/python/feast/infra/offline_stores/redshift.py:MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN + +MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ /* Compute a deterministic hash for the `left_table_query_string` that will be used throughout all the logic as the field to GROUP BY the data */ WITH entity_dataframe AS ( SELECT *, - {{entity_df_event_timestamp_col}} AS entity_timestamp, + {{entity_df_event_timestamp_col}} AS entity_timestamp {% for featureview in featureviews %} - CONCAT( + {% if featureview.entities %} + ,CONCAT( {% for entity in featureview.entities %} CAST({{entity}} AS STRING), {% endfor %} CAST({{entity_df_event_timestamp_col}} AS STRING) - ) AS {{featureview.name}}__entity_row_unique_id, + ) AS {{featureview.name}}__entity_row_unique_id + {% else %} + ,CAST({{entity_df_event_timestamp_col}} AS STRING) AS {{featureview.name}}__entity_row_unique_id + {% endif %} {% endfor %} - FROM {{ left_table_query_string }} + FROM `{{ left_table_query_string }}` ), {% for featureview in featureviews %} {{ featureview.name 
}}__entity_dataframe AS ( SELECT - {{ featureview.entities | join(', ')}}, + {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} entity_timestamp, {{featureview.name}}__entity_row_unique_id FROM entity_dataframe - GROUP BY {{ featureview.entities | join(', ')}}, entity_timestamp, {{featureview.name}}__entity_row_unique_id + GROUP BY + {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} + entity_timestamp, + {{featureview.name}}__entity_row_unique_id ), /* @@ -586,9 +583,9 @@ def _get_bigquery_client(project: Optional[str] = None): 1. We first join the current feature_view to the entity dataframe that has been passed. This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there is a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher than the one provided minus the TTL - For each row, join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -599,16 +596,16 @@ def _get_bigquery_client(project: Optional[str] = None): {{ featureview.name }}__subquery AS ( SELECT - {{ featureview.event_timestamp_column }} as event_timestamp, + {{ featureview.timestamp_field }} as event_timestamp, {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} - {{ featureview.entity_selections | join(', ')}}, + {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} - {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{feature}}{% else %}{{ feature }}{% endif %}{% if loop.last %}{% else %}, {% endif %} + {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE {{ featureview.event_timestamp_column }} <= '{{max_timestamp}}' + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' {% if featureview.ttl == 0 %}{% else %} - AND {{ featureview.event_timestamp_column }} >= Timestamp_sub('{{min_timestamp}}', interval {{ featureview.ttl }} second) + AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}' {% endif %} ), @@ -642,7 +639,7 @@ def _get_bigquery_client(project: Optional[str] = None): SELECT {{featureview.name}}__entity_row_unique_id, event_timestamp, - MAX(created_timestamp) as created_timestamp, + MAX(created_timestamp) as created_timestamp FROM {{ featureview.name }}__base GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp ), @@ -654,19 +651,23 @@ def _get_bigquery_client(project: Optional[str] = None): */ {{ featureview.name }}__latest AS ( SELECT - {{featureview.name}}__entity_row_unique_id, - MAX(event_timestamp) AS event_timestamp + event_timestamp, + {% if featureview.created_timestamp_column %}created_timestamp,{% endif %} + {{featureview.name}}__entity_row_unique_id + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY 
{{featureview.name}}__entity_row_unique_id + ORDER BY event_timestamp DESC{% if featureview.created_timestamp_column %},created_timestamp DESC{% endif %} + ) AS row_number + FROM {{ featureview.name }}__base {% if featureview.created_timestamp_column %} - ,ANY_VALUE(created_timestamp) AS created_timestamp + INNER JOIN {{ featureview.name }}__dedup + USING ({{featureview.name}}__entity_row_unique_id, event_timestamp, created_timestamp) {% endif %} - - FROM {{ featureview.name }}__base - {% if featureview.created_timestamp_column %} - INNER JOIN {{ featureview.name }}__dedup - USING ({{featureview.name}}__entity_row_unique_id, event_timestamp, created_timestamp) - {% endif %} - - GROUP BY {{featureview.name}}__entity_row_unique_id + ) + WHERE row_number = 1 ), /* @@ -693,14 +694,14 @@ def _get_bigquery_client(project: Optional[str] = None): The entity_dataframe dataset being our source of truth here. */ -SELECT * EXCEPT(entity_timestamp, {% for featureview in featureviews %} {{featureview.name}}__entity_row_unique_id{% if loop.last %}{% else %},{% endif %}{% endfor %}) +SELECT {{ final_output_feature_names | join(', ')}} FROM entity_dataframe {% for featureview in featureviews %} LEFT JOIN ( SELECT - {{featureview.name}}__entity_row_unique_id, + {{featureview.name}}__entity_row_unique_id {% for feature in featureview.features %} - {% if full_feature_names %}{{ featureview.name }}__{{feature}}{% else %}{{ feature }}{% endif %}, + ,{% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %} {% endfor %} FROM {{ featureview.name }}__cleaned ) USING ({{featureview.name}}__entity_row_unique_id) diff --git a/sdk/python/feast/infra/offline_stores/bigquery_source.py b/sdk/python/feast/infra/offline_stores/bigquery_source.py index a5c1afa3e0..001576c98f 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery_source.py +++ b/sdk/python/feast/infra/offline_stores/bigquery_source.py @@ -1,32 +1,97 @@ -from typing import Callable, Dict, Iterable, Optional, Tuple +import warnings +from typing import Callable, Dict, Iterable, List, Optional, Tuple from feast import type_map from feast.data_source import DataSource from feast.errors import DataSourceNotFoundException from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage from feast.value_type import ValueType class BigQuerySource(DataSource): def __init__( self, + *, event_timestamp_column: Optional[str] = "", - table_ref: Optional[str] = None, + table: Optional[str] = None, created_timestamp_column: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, - date_partition_column: Optional[str] = "", + date_partition_column: Optional[str] = None, query: Optional[str] = None, + name: Optional[str] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = None, ): - self._bigquery_options = BigQueryOptions(table_ref=table_ref, query=query) + """Create a BigQuerySource from an existing table or query. + + Args: + table (optional): The BigQuery table where features can be found. + event_timestamp_column: (Deprecated) Event timestamp column used for point in time joins of feature values. 
+ created_timestamp_column (optional): Timestamp column when row was created, used for deduplicating rows. + field_mapping: A dictionary mapping of column names in this data source to feature names in a feature table + or view. Only used for feature columns, not entities or timestamp columns. + date_partition_column (deprecated): Timestamp column used for partitioning. + query (optional): SQL query to execute to generate data for this data source. + name (optional): Name for the source. Defaults to the table if not specified. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the bigquery source, typically the email of the primary + maintainer. + timestamp_field (optional): Event timestamp field used for point in time + joins of feature values. + Example: + >>> from feast import BigQuerySource + >>> my_bigquery_source = BigQuerySource(table="gcp_project:bq_dataset.bq_table") + """ + if table is None and query is None: + raise ValueError('No "table" or "query" argument provided.') + + self.bigquery_options = BigQueryOptions(table=table, query=query) + + if date_partition_column: + warnings.warn( + ( + "The argument 'date_partition_column' is not supported for BigQuery sources. " + "It will be removed in Feast 0.21+" + ), + DeprecationWarning, + ) + + # If no name, use the table as the default name + _name = name + if not _name: + if table: + _name = table + else: + warnings.warn( + ( + f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) or `table`: {self.query}" + ), + DeprecationWarning, + ) super().__init__( - event_timestamp_column, - created_timestamp_column, - field_mapping, - date_partition_column, + name=_name if _name else "", + event_timestamp_column=event_timestamp_column, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping, + description=description, + tags=tags, + owner=owner, + timestamp_field=timestamp_field, ) + # Note: Python requires redefining hash in child classes that override __eq__ + def __hash__(self): + return super().__hash__() + def __eq__(self, other): if not isinstance(other, BigQuerySource): raise TypeError( @@ -34,60 +99,48 @@ def __eq__(self, other): ) return ( - self.bigquery_options.table_ref == other.bigquery_options.table_ref - and self.bigquery_options.query == other.bigquery_options.query - and self.event_timestamp_column == other.event_timestamp_column - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping + super().__eq__(other) + and self.table == other.table + and self.query == other.query ) @property - def table_ref(self): - return self._bigquery_options.table_ref + def table(self): + return self.bigquery_options.table @property def query(self): - return self._bigquery_options.query - - @property - def bigquery_options(self): - """ - Returns the bigquery options of this data source - """ - return self._bigquery_options - - @bigquery_options.setter - def bigquery_options(self, bigquery_options): - """ - Sets the bigquery options of this data source - """ - self._bigquery_options = bigquery_options + return self.bigquery_options.query @staticmethod def from_proto(data_source: DataSourceProto): - assert data_source.HasField("bigquery_options") return BigQuerySource( + name=data_source.name, field_mapping=dict(data_source.field_mapping), - table_ref=data_source.bigquery_options.table_ref, - 
event_timestamp_column=data_source.event_timestamp_column, + table=data_source.bigquery_options.table, + timestamp_field=data_source.timestamp_field, created_timestamp_column=data_source.created_timestamp_column, - date_partition_column=data_source.date_partition_column, query=data_source.bigquery_options.query, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, ) def to_proto(self) -> DataSourceProto: data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.BATCH_BIGQUERY, field_mapping=self.field_mapping, bigquery_options=self.bigquery_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, + timestamp_field=self.timestamp_field, + created_timestamp_column=self.created_timestamp_column, ) - data_source_proto.event_timestamp_column = self.event_timestamp_column - data_source_proto.created_timestamp_column = self.created_timestamp_column - data_source_proto.date_partition_column = self.date_partition_column - return data_source_proto def validate(self, config: RepoConfig): @@ -97,14 +150,14 @@ def validate(self, config: RepoConfig): client = bigquery.Client() try: - client.get_table(self.table_ref) + client.get_table(self.table) except NotFound: - raise DataSourceNotFoundException(self.table_ref) + raise DataSourceNotFoundException(self.table) def get_table_query_string(self) -> str: """Returns a string that can directly be used to reference this table in SQL""" - if self.table_ref: - return f"`{self.table_ref}`" + if self.table: + return f"`{self.table}`" else: return f"({self.query})" @@ -118,59 +171,35 @@ def get_table_column_names_and_types( from google.cloud import bigquery client = bigquery.Client() - if self.table_ref is not None: - table_schema = client.get_table(self.table_ref).schema - if not isinstance(table_schema[0], bigquery.schema.SchemaField): + if self.table: + schema = client.get_table(self.table).schema + if not isinstance(schema[0], bigquery.schema.SchemaField): raise TypeError("Could not parse BigQuery table schema.") - - name_type_pairs = [(field.name, field.field_type) for field in table_schema] else: bq_columns_query = f"SELECT * FROM ({self.query}) LIMIT 1" queryRes = client.query(bq_columns_query).result() - name_type_pairs = [ - (schema_field.name, schema_field.field_type) - for schema_field in queryRes.schema - ] + schema = queryRes.schema + + name_type_pairs: List[Tuple[str, str]] = [] + for field in schema: + bq_type_as_str = field.field_type + if field.mode == "REPEATED": + bq_type_as_str = "ARRAY<" + bq_type_as_str + ">" + name_type_pairs.append((field.name, bq_type_as_str)) return name_type_pairs class BigQueryOptions: """ - DataSource BigQuery options used to source features from BigQuery query + Configuration options for a BigQuery data source. 
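+    Typically exactly one of ``table`` or ``query`` is set, e.g. (hypothetical) ``BigQueryOptions(table="project.dataset.table", query=None)``.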
""" - def __init__(self, table_ref: Optional[str], query: Optional[str]): - self._table_ref = table_ref - self._query = query - - @property - def query(self): - """ - Returns the BigQuery SQL query referenced by this source - """ - return self._query - - @query.setter - def query(self, query): - """ - Sets the BigQuery SQL query referenced by this source - """ - self._query = query - - @property - def table_ref(self): - """ - Returns the table ref of this BQ table - """ - return self._table_ref - - @table_ref.setter - def table_ref(self, table_ref): - """ - Sets the table ref of this BQ table - """ - self._table_ref = table_ref + def __init__( + self, table: Optional[str], query: Optional[str], + ): + self.table = table or "" + self.query = query or "" @classmethod def from_proto(cls, bigquery_options_proto: DataSourceProto.BigQueryOptions): @@ -183,10 +212,8 @@ def from_proto(cls, bigquery_options_proto: DataSourceProto.BigQueryOptions): Returns: Returns a BigQueryOptions object based on the bigquery_options protobuf """ - bigquery_options = cls( - table_ref=bigquery_options_proto.table_ref, - query=bigquery_options_proto.query, + table=bigquery_options_proto.table, query=bigquery_options_proto.query, ) return bigquery_options @@ -198,9 +225,31 @@ def to_proto(self) -> DataSourceProto.BigQueryOptions: Returns: BigQueryOptionsProto protobuf """ - bigquery_options_proto = DataSourceProto.BigQueryOptions( - table_ref=self.table_ref, query=self.query, + table=self.table, query=self.query, ) return bigquery_options_proto + + +class SavedDatasetBigQueryStorage(SavedDatasetStorage): + _proto_attr_name = "bigquery_storage" + + bigquery_options: BigQueryOptions + + def __init__(self, table: str): + self.bigquery_options = BigQueryOptions(table=table, query=None) + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: + return SavedDatasetBigQueryStorage( + table=BigQueryOptions.from_proto(storage_proto.bigquery_storage).table + ) + + def to_proto(self) -> SavedDatasetStorageProto: + return SavedDatasetStorageProto( + bigquery_storage=self.bigquery_options.to_proto() + ) + + def to_data_source(self) -> DataSource: + return BigQuerySource(table=self.bigquery_options.table) diff --git a/sdk/python/tensorflow_metadata/proto/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/__init__.py similarity index 100% rename from sdk/python/tensorflow_metadata/proto/__init__.py rename to sdk/python/feast/infra/offline_stores/contrib/__init__.py diff --git a/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py new file mode 100644 index 0000000000..370ed0f47c --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py @@ -0,0 +1,14 @@ +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.universal.data_sources.spark_data_source_creator import ( + SparkDataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.trino import ( + TrinoSourceCreator, +) + +FULL_REPO_CONFIGS = [ + IntegrationTestRepoConfig(offline_store_creator=SparkDataSourceCreator), + IntegrationTestRepoConfig(offline_store_creator=TrinoSourceCreator), +] diff --git a/sdk/python/tensorflow_metadata/proto/v0/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/__init__.py similarity index 100% rename from 
sdk/python/tensorflow_metadata/proto/v0/__init__.py rename to sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/__init__.py diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py new file mode 100644 index 0000000000..770bd8adc2 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -0,0 +1,547 @@ +import warnings +from datetime import datetime +from typing import Dict, List, Optional, Tuple, Union + +import numpy as np +import pandas +import pandas as pd +import pyarrow +import pyspark +from pydantic import StrictStr +from pyspark import SparkConf +from pyspark.sql import SparkSession +from pytz import utc + +from feast import FeatureView, OnDemandFeatureView +from feast.data_source import DataSource +from feast.errors import InvalidEntityType +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SavedDatasetSparkStorage, + SparkSource, +) +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.registry import Registry +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.type_map import spark_schema_to_np_dtypes +from feast.usage import log_exceptions_and_usage + + +class SparkOfflineStoreConfig(FeastConfigBaseModel): + type: StrictStr = "spark" + """ Offline store type selector""" + + spark_conf: Optional[Dict[str, str]] = None + """ Configuration overlay for the spark session """ + # A SparkSession is not serializable and we don't want to pass it around as an argument + + +class SparkOfflineStore(OfflineStore): + @staticmethod + @log_exceptions_and_usage(offline_store="spark") + def pull_latest_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + created_timestamp_column: Optional[str], + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + spark_session = get_spark_session_or_start_new_with_repoconfig( + config.offline_store + ) + assert isinstance(config.offline_store, SparkOfflineStoreConfig) + assert isinstance(data_source, SparkSource) + + warnings.warn( + "The spark offline store is an experimental feature in alpha development. 
" + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + + print("Pulling latest features from spark offline store") + + from_expression = data_source.get_table_query_string() + + partition_by_join_key_string = ", ".join(join_key_columns) + if partition_by_join_key_string != "": + partition_by_join_key_string = ( + "PARTITION BY " + partition_by_join_key_string + ) + timestamps = [timestamp_field] + if created_timestamp_column: + timestamps.append(created_timestamp_column) + timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" + field_string = ", ".join(join_key_columns + feature_name_columns + timestamps) + + start_date_str = _format_datetime(start_date) + end_date_str = _format_datetime(end_date) + query = f""" + SELECT + {field_string} + {f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""} + FROM ( + SELECT {field_string}, + ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS feast_row_ + FROM {from_expression} t1 + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date_str}') AND TIMESTAMP('{end_date_str}') + ) t2 + WHERE feast_row_ = 1 + """ + + return SparkRetrievalJob( + spark_session=spark_session, + query=query, + full_feature_names=False, + on_demand_feature_views=None, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="spark") + def get_historical_features( + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pandas.DataFrame, str], + registry: Registry, + project: str, + full_feature_names: bool = False, + ) -> RetrievalJob: + assert isinstance(config.offline_store, SparkOfflineStoreConfig) + warnings.warn( + "The spark offline store is an experimental feature in alpha development. 
" + "Some functionality may still be unstable so functionality can change in the future.", + RuntimeWarning, + ) + spark_session = get_spark_session_or_start_new_with_repoconfig( + store_config=config.offline_store + ) + tmp_entity_df_table_name = offline_utils.get_temp_entity_table_name() + + entity_schema = _get_entity_schema( + spark_session=spark_session, entity_df=entity_df, + ) + event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema=entity_schema, + ) + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, event_timestamp_col, spark_session, + ) + _upload_entity_df( + spark_session=spark_session, + table_name=tmp_entity_df_table_name, + entity_df=entity_df, + event_timestamp_col=event_timestamp_col, + ) + + expected_join_keys = offline_utils.get_expected_join_keys( + project=project, feature_views=feature_views, registry=registry + ) + offline_utils.assert_expected_columns_in_entity_df( + entity_schema=entity_schema, + join_keys=expected_join_keys, + entity_df_event_timestamp_col=event_timestamp_col, + ) + + query_context = offline_utils.get_feature_view_query_context( + feature_refs, + feature_views, + registry, + project, + entity_df_event_timestamp_range, + ) + + query = offline_utils.build_point_in_time_query( + feature_view_query_contexts=query_context, + left_table_query_string=tmp_entity_df_table_name, + entity_df_event_timestamp_col=event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + + return SparkRetrievalJob( + spark_session=spark_session, + query=query, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(set(entity_schema.keys()) - {event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="spark") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + """ + Note that join_key_columns, feature_name_columns, timestamp_field, and + created_timestamp_column have all already been mapped to column names of the + source table and those column names are the values passed into this function. + """ + assert isinstance(data_source, SparkSource) + warnings.warn( + "The spark offline store is an experimental feature in alpha development. 
" + "This API is unstable and it could and most probably will be changed in the future.", + RuntimeWarning, + ) + + spark_session = get_spark_session_or_start_new_with_repoconfig( + store_config=config.offline_store + ) + + fields = ", ".join(join_key_columns + feature_name_columns + [timestamp_field]) + from_expression = data_source.get_table_query_string() + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + + query = f""" + SELECT {fields} + FROM {from_expression} + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + """ + + return SparkRetrievalJob( + spark_session=spark_session, query=query, full_feature_names=False + ) + + +class SparkRetrievalJob(RetrievalJob): + def __init__( + self, + spark_session: SparkSession, + query: str, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + metadata: Optional[RetrievalMetadata] = None, + ): + super().__init__() + self.spark_session = spark_session + self.query = query + self._full_feature_names = full_feature_names + self._on_demand_feature_views = on_demand_feature_views + self._metadata = metadata + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views + + def to_spark_df(self) -> pyspark.sql.DataFrame: + statements = self.query.split("---EOS---") + *_, last = map(self.spark_session.sql, statements) + return last + + def _to_df_internal(self) -> pd.DataFrame: + """Return dataset as Pandas DataFrame synchronously""" + return self.to_spark_df().toPandas() + + def _to_arrow_internal(self) -> pyarrow.Table: + """Return dataset as pyarrow Table synchronously""" + df = self.to_df() + return pyarrow.Table.from_pandas(df) # noqa + + def persist(self, storage: SavedDatasetStorage): + """ + Run the retrieval and persist the results in the same offline store used for read. + Please note the persisting is done only within the scope of the spark session. + """ + assert isinstance(storage, SavedDatasetSparkStorage) + table_name = storage.spark_options.table + if not table_name: + raise ValueError("Cannot persist, table_name is not defined") + self.to_spark_df().createOrReplaceTempView(table_name) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + """ + Return metadata information about retrieval. + Should be available even before materializing the dataset itself. 
+ """ + return self._metadata + + +def get_spark_session_or_start_new_with_repoconfig( + store_config: SparkOfflineStoreConfig, +) -> SparkSession: + spark_session = SparkSession.getActiveSession() + if not spark_session: + spark_builder = SparkSession.builder + spark_conf = store_config.spark_conf + if spark_conf: + spark_builder = spark_builder.config( + conf=SparkConf().setAll([(k, v) for k, v in spark_conf.items()]) + ) + + spark_session = spark_builder.getOrCreate() + spark_session.conf.set("spark.sql.parser.quotedRegexColumnNames", "true") + return spark_session + + +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, + spark_session: SparkSession, +) -> Tuple[datetime, datetime]: + if isinstance(entity_df, pd.DataFrame): + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), + ) + elif isinstance(entity_df, str): + # If the entity_df is a string (SQL query), determine range + # from table + df = spark_session.sql(entity_df).select(entity_df_event_timestamp_col) + # TODO(kzhang132): need utc conversion here. + entity_df_event_timestamp_range = ( + df.agg({entity_df_event_timestamp_col: "max"}).collect()[0][0], + df.agg({entity_df_event_timestamp_col: "min"}).collect()[0][0], + ) + else: + raise InvalidEntityType(type(entity_df)) + + return entity_df_event_timestamp_range + + +def _get_entity_schema( + spark_session: SparkSession, entity_df: Union[pandas.DataFrame, str] +) -> Dict[str, np.dtype]: + if isinstance(entity_df, pd.DataFrame): + return dict(zip(entity_df.columns, entity_df.dtypes)) + elif isinstance(entity_df, str): + entity_spark_df = spark_session.sql(entity_df) + return dict( + zip( + entity_spark_df.columns, + spark_schema_to_np_dtypes(entity_spark_df.dtypes), + ) + ) + else: + raise InvalidEntityType(type(entity_df)) + + +def _upload_entity_df( + spark_session: SparkSession, + table_name: str, + entity_df: Union[pandas.DataFrame, str], + event_timestamp_col: str, +) -> None: + if isinstance(entity_df, pd.DataFrame): + entity_df[event_timestamp_col] = pd.to_datetime( + entity_df[event_timestamp_col], utc=True + ) + spark_session.createDataFrame(entity_df).createOrReplaceTempView(table_name) + return + elif isinstance(entity_df, str): + spark_session.sql(entity_df).createOrReplaceTempView(table_name) + return + else: + raise InvalidEntityType(type(entity_df)) + + +def _format_datetime(t: datetime) -> str: + # Since Hive does not support timezone, need to transform to utc. 
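+ # Illustrative example (assumed input): datetime(2022, 1, 1, 12, tzinfo=utc) -> "2022-01-01 12:00:00.000000"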
+ if t.tzinfo: + t = t.astimezone(tz=utc) + dt = t.strftime("%Y-%m-%d %H:%M:%S.%f") + return dt + + +MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ +/* + Compute a deterministic hash for the `left_table_query_string` that will be used throughout + all the logic as the field to GROUP BY the data +*/ +CREATE OR REPLACE TEMPORARY VIEW entity_dataframe AS ( + SELECT *, + {{entity_df_event_timestamp_col}} AS entity_timestamp + {% for featureview in featureviews %} + ,CONCAT( + {% for entity in featureview.entities %} + CAST({{entity}} AS STRING), + {% endfor %} + CAST({{entity_df_event_timestamp_col}} AS STRING) + ) AS {{featureview.name}}__entity_row_unique_id + {% endfor %} + FROM {{ left_table_query_string }} +); + +---EOS--- + +{% for featureview in featureviews %} + +CREATE OR REPLACE TEMPORARY VIEW {{ featureview.name }}__cleaned AS ( + + WITH {{ featureview.name }}__entity_dataframe AS ( + SELECT + {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} + entity_timestamp, + {{featureview.name}}__entity_row_unique_id + FROM entity_dataframe + GROUP BY + {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} + entity_timestamp, + {{featureview.name}}__entity_row_unique_id + ), + + /* + This query template performs the point-in-time correctness join for a single feature set table + to the provided entity table. + + 1. We first join the current feature_view to the entity dataframe that has been passed. + This JOIN has the following logic: + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` + is less than the one provided in the entity dataframe + - If there is a TTL for the current feature_view, also keep the rows where the `timestamp_field` + is higher than the one provided minus the TTL + - For each row, join on the entity key and retrieve the `entity_row_unique_id` that has been + computed previously + + The output of this CTE will contain all the necessary information and already filtered out most + of the data that is not relevant. 
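+ (Illustrative example: with a TTL of 3600 seconds, only feature rows from the hour preceding each entity timestamp survive this CTE.)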
+ */ + + {{ featureview.name }}__subquery AS ( + SELECT + {{ featureview.timestamp_field }} as event_timestamp, + {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} + {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} + {% for feature in featureview.features %} + {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} + {% endfor %} + FROM {{ featureview.table_subquery }} + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' + {% if featureview.ttl == 0 %}{% else %} + AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}' + {% endif %} + ), + + {{ featureview.name }}__base AS ( + SELECT + subquery.*, + entity_dataframe.entity_timestamp, + entity_dataframe.{{featureview.name}}__entity_row_unique_id + FROM {{ featureview.name }}__subquery AS subquery + INNER JOIN {{ featureview.name }}__entity_dataframe AS entity_dataframe + ON TRUE + AND subquery.event_timestamp <= entity_dataframe.entity_timestamp + + {% if featureview.ttl == 0 %}{% else %} + AND subquery.event_timestamp >= entity_dataframe.entity_timestamp - {{ featureview.ttl }} * interval '1' second + {% endif %} + + {% for entity in featureview.entities %} + AND subquery.{{ entity }} = entity_dataframe.{{ entity }} + {% endfor %} + ), + + /* + 2. If the `created_timestamp_column` has been set, we need to + deduplicate the data first. This is done by calculating the + `MAX(created_timestamp)` for each event_timestamp. + We then join the data on the next CTE + */ + {% if featureview.created_timestamp_column %} + {{ featureview.name }}__dedup AS ( + SELECT + {{featureview.name}}__entity_row_unique_id, + event_timestamp, + MAX(created_timestamp) as created_timestamp + FROM {{ featureview.name }}__base + GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp + ), + {% endif %} + + /* + 3. The data has been filtered during the first CTE "*__base" + Thus we only need to compute the latest timestamp of each feature. + */ + {{ featureview.name }}__latest AS ( + SELECT + event_timestamp, + {% if featureview.created_timestamp_column %}created_timestamp,{% endif %} + {{featureview.name}}__entity_row_unique_id + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY {{featureview.name}}__entity_row_unique_id + ORDER BY event_timestamp DESC{% if featureview.created_timestamp_column %},created_timestamp DESC{% endif %} + ) AS row_number + FROM {{ featureview.name }}__base + {% if featureview.created_timestamp_column %} + INNER JOIN {{ featureview.name }}__dedup + USING ({{featureview.name}}__entity_row_unique_id, event_timestamp, created_timestamp) + {% endif %} + ) + WHERE row_number = 1 + ) + + /* + 4. Once we know the latest value of each feature for a given timestamp, + we can join the data back to the original "base" dataset + */ + SELECT base.* + FROM {{ featureview.name }}__base as base + INNER JOIN {{ featureview.name }}__latest + USING( + {{featureview.name}}__entity_row_unique_id, + event_timestamp + {% if featureview.created_timestamp_column %} + ,created_timestamp + {% endif %} + ) +) + +---EOS--- + +{% endfor %} + +/* + Joins the outputs of multiple time travel joins to a single table. + The entity_dataframe dataset being our source of truth here. 
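+ Each LEFT JOIN below contributes one feature view's (possibly renamed) feature columns, keyed on that view's entity_row_unique_id.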
+ */
+
+SELECT {{ final_output_feature_names | join(', ')}}
+FROM entity_dataframe
+{% for featureview in featureviews %}
+LEFT JOIN (
+    SELECT
+        {{featureview.name}}__entity_row_unique_id
+        {% for feature in featureview.features %}
+            ,{% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}
+        {% endfor %}
+    FROM {{ featureview.name }}__cleaned
+) USING ({{featureview.name}}__entity_row_unique_id)
+{% endfor %}
+"""
diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py
new file mode 100644
index 0000000000..dc92e08a50
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py
@@ -0,0 +1,327 @@
+import logging
+import traceback
+import warnings
+from enum import Enum
+from typing import Any, Callable, Dict, Iterable, Optional, Tuple
+
+from pyspark.sql import SparkSession
+
+from feast.data_source import DataSource
+from feast.errors import DataSourceNoNameException
+from feast.infra.offline_stores.offline_utils import get_temp_entity_table_name
+from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto
+from feast.protos.feast.core.SavedDataset_pb2 import (
+    SavedDatasetStorage as SavedDatasetStorageProto,
+)
+from feast.repo_config import RepoConfig
+from feast.saved_dataset import SavedDatasetStorage
+from feast.type_map import spark_to_feast_value_type
+from feast.value_type import ValueType
+
+logger = logging.getLogger(__name__)
+
+
+class SparkSourceFormat(Enum):
+    csv = "csv"
+    json = "json"
+    parquet = "parquet"
+
+
+class SparkSource(DataSource):
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        table: Optional[str] = None,
+        query: Optional[str] = None,
+        path: Optional[str] = None,
+        file_format: Optional[str] = None,
+        event_timestamp_column: Optional[str] = None,
+        created_timestamp_column: Optional[str] = None,
+        field_mapping: Optional[Dict[str, str]] = None,
+        date_partition_column: Optional[str] = None,
+        description: Optional[str] = "",
+        tags: Optional[Dict[str, str]] = None,
+        owner: Optional[str] = "",
+        timestamp_field: Optional[str] = None,
+    ):
+        # If no name is provided, use the table reference as the default name
+        _name = name
+        if not _name:
+            if table:
+                _name = table
+            else:
+                raise DataSourceNoNameException()
+
+        if date_partition_column:
+            warnings.warn(
+                (
+                    "The argument 'date_partition_column' is not supported for Spark sources. "
+                    "It will be removed in Feast 0.21+."
+                ),
+                DeprecationWarning,
+            )
+
+        super().__init__(
+            name=_name,
+            event_timestamp_column=event_timestamp_column,
+            created_timestamp_column=created_timestamp_column,
+            field_mapping=field_mapping,
+            description=description,
+            tags=tags,
+            owner=owner,
+            timestamp_field=timestamp_field,
+        )
+        warnings.warn(
+            "The spark data source API is an experimental feature in alpha development. "
+            "This API is unstable and will likely change in the future.",
+            RuntimeWarning,
+        )
+        self.spark_options = SparkOptions(
+            table=table, query=query, path=path, file_format=file_format,
+        )
+
+    @property
+    def table(self):
+        """
+        Returns the table of this feature data source
+        """
+        return self.spark_options.table
+
+    @property
+    def query(self):
+        """
+        Returns the query of this feature data source
+        """
+        return self.spark_options.query
+
+    @property
+    def path(self):
+        """
+        Returns the path of the spark data source file.
+        """
+        return self.spark_options.path
+
+    @property
+    def file_format(self):
+        """
+        Returns the file format of this feature data source.
+        """
+        return self.spark_options.file_format
+
+    @staticmethod
+    def from_proto(data_source: DataSourceProto) -> Any:
+        assert data_source.HasField("spark_options")
+        spark_options = SparkOptions.from_proto(data_source.spark_options)
+
+        return SparkSource(
+            name=data_source.name,
+            field_mapping=dict(data_source.field_mapping),
+            table=spark_options.table,
+            query=spark_options.query,
+            path=spark_options.path,
+            file_format=spark_options.file_format,
+            timestamp_field=data_source.timestamp_field,
+            created_timestamp_column=data_source.created_timestamp_column,
+            description=data_source.description,
+            tags=dict(data_source.tags),
+            owner=data_source.owner,
+        )
+
+    def to_proto(self) -> DataSourceProto:
+        data_source_proto = DataSourceProto(
+            name=self.name,
+            type=DataSourceProto.BATCH_SPARK,
+            data_source_class_type="feast.infra.offline_stores.contrib.spark_offline_store.spark_source.SparkSource",
+            field_mapping=self.field_mapping,
+            spark_options=self.spark_options.to_proto(),
+            description=self.description,
+            tags=self.tags,
+            owner=self.owner,
+        )
+
+        data_source_proto.timestamp_field = self.timestamp_field
+        data_source_proto.created_timestamp_column = self.created_timestamp_column
+
+        return data_source_proto
+
+    def validate(self, config: RepoConfig):
+        self.get_table_column_names_and_types(config)
+
+    @staticmethod
+    def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
+        return spark_to_feast_value_type
+
+    def get_table_column_names_and_types(
+        self, config: RepoConfig
+    ) -> Iterable[Tuple[str, str]]:
+        from feast.infra.offline_stores.contrib.spark_offline_store.spark import (
+            get_spark_session_or_start_new_with_repoconfig,
+        )
+
+        spark_session = get_spark_session_or_start_new_with_repoconfig(
+            store_config=config.offline_store
+        )
+        df = spark_session.sql(f"SELECT * FROM {self.get_table_query_string()}")
+        return (
+            (fields["name"], fields["type"])
+            for fields in df.schema.jsonValue()["fields"]
+        )
+
+    def get_table_query_string(self) -> str:
+        """Returns a string that can directly be used to reference this table in SQL"""
+        if self.table:
+            # Backticks make sure that spark sql knows this is a table reference.
+            return f"`{self.table}`"
+        if self.query:
+            return f"({self.query})"
+
+        # If both the table reference and the query are null, we can load from file.
+        spark_session = SparkSession.getActiveSession()
+        if spark_session is None:
+            raise AssertionError("Could not find an active spark session.")
+        try:
+            df = spark_session.read.format(self.file_format).load(self.path)
+        except Exception:
+            logger.exception(
+                "Spark read of file source failed.\n" + traceback.format_exc()
+            )
+            raise
+        tmp_table_name = get_temp_entity_table_name()
+        df.createOrReplaceTempView(tmp_table_name)
+
+        return f"`{tmp_table_name}`"
+
+
+class SparkOptions:
+    allowed_formats = [format.value for format in SparkSourceFormat]
+
+    def __init__(
+        self,
+        table: Optional[str],
+        query: Optional[str],
+        path: Optional[str],
+        file_format: Optional[str],
+    ):
+        # Check that only one of the ways to load a spark dataframe can be used. We have
+        # to treat empty string and null the same due to proto (de)serialization.
+        if sum([(not (not arg)) for arg in [table, query, path]]) != 1:
+            raise ValueError(
+                "Exactly one of params(table, query, path) must be specified."
+            )
+        if path:
+            if not file_format:
+                raise ValueError(
+                    "If 'path' is specified, then 'file_format' is required."
+                )
+            if file_format not in self.allowed_formats:
+                raise ValueError(
+                    f"'file_format' should be one of {self.allowed_formats}"
+                )
+
+        self._table = table
+        self._query = query
+        self._path = path
+        self._file_format = file_format
+
+    @property
+    def table(self):
+        return self._table
+
+    @table.setter
+    def table(self, table):
+        self._table = table
+
+    @property
+    def query(self):
+        return self._query
+
+    @query.setter
+    def query(self, query):
+        self._query = query
+
+    @property
+    def path(self):
+        return self._path
+
+    @path.setter
+    def path(self, path):
+        self._path = path
+
+    @property
+    def file_format(self):
+        return self._file_format
+
+    @file_format.setter
+    def file_format(self, file_format):
+        self._file_format = file_format
+
+    @classmethod
+    def from_proto(cls, spark_options_proto: DataSourceProto.SparkOptions):
+        """
+        Creates a SparkOptions from a protobuf representation of a spark option
+        Args:
+            spark_options_proto: a protobuf representation of a datasource
+        Returns:
+            Returns a SparkOptions object based on the spark_options protobuf
+        """
+        spark_options = cls(
+            table=spark_options_proto.table,
+            query=spark_options_proto.query,
+            path=spark_options_proto.path,
+            file_format=spark_options_proto.file_format,
+        )
+
+        return spark_options
+
+    def to_proto(self) -> DataSourceProto.SparkOptions:
+        """
+        Converts a SparkOptions object to its protobuf representation.
+        Returns:
+            SparkOptionsProto protobuf
+        """
+        spark_options_proto = DataSourceProto.SparkOptions(
+            table=self.table,
+            query=self.query,
+            path=self.path,
+            file_format=self.file_format,
+        )
+
+        return spark_options_proto
+
+
+class SavedDatasetSparkStorage(SavedDatasetStorage):
+    _proto_attr_name = "spark_storage"
+
+    spark_options: SparkOptions
+
+    def __init__(
+        self,
+        table: Optional[str] = None,
+        query: Optional[str] = None,
+        path: Optional[str] = None,
+        file_format: Optional[str] = None,
+    ):
+        self.spark_options = SparkOptions(
+            table=table, query=query, path=path, file_format=file_format,
+        )
+
+    @staticmethod
+    def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage:
+        spark_options = SparkOptions.from_proto(storage_proto.spark_storage)
+        return SavedDatasetSparkStorage(
+            table=spark_options.table,
+            query=spark_options.query,
+            path=spark_options.path,
+            file_format=spark_options.file_format,
+        )
+
+    def to_proto(self) -> SavedDatasetStorageProto:
+        return SavedDatasetStorageProto(spark_storage=self.spark_options.to_proto())
+
+    def to_data_source(self) -> DataSource:
+        return SparkSource(
+            table=self.spark_options.table,
+            query=self.spark_options.query,
+            path=self.spark_options.path,
+            file_format=self.spark_options.file_format,
+        )
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/README.md b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/README.md
new file mode 100644
index 0000000000..23fc2092da
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/README.md
@@ -0,0 +1,30 @@
+# Trino offline store
+This walkthrough explains how to test the Trino plugin locally.
+
+## Start Trino in a docker container
+```sh
+make start-trino-locally
+```
+
+Normally this should start a docker container named `trino` listening on port 8080.
+You can see the docker command executed by looking at the `Makefile` at the root.
+
+While the container is running, you can inspect the queries executed during the tests in the [local cluster UI](http://0.0.0.0:8080/ui/#).
+This can be helpful to debug the Trino plugin while executing tests.
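+
+To point a feature repository at a Trino cluster, the offline store section of
+`feature_store.yaml` looks roughly like the following (this mirrors the example
+in `connectors/upload.py`; the host, port, catalog and dataset values here are
+illustrative and need to match your own setup):
+
+```yaml
+offline_store:
+    type: trino
+    host: localhost
+    port: 8080
+    catalog: hive
+    dataset: ci
+    connector:
+        type: hive
+        file_format: parquet
+```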
+ +## Run the universal suites locally +```sh +make test-trino-plugin-locally +``` + +## Kill the local Trino container +```sh +make kill-trino-locally +``` + +You can always look at the running containers and kill the ones you don't need anymore +```sh +docker ps +docker stop {NAME/SHA OF THE CONTAINER} +``` +feast.sdk.python.feast.infra.offline_stores.contrib.trino_offline_store \ No newline at end of file diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py new file mode 100644 index 0000000000..5967b7a863 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py @@ -0,0 +1,194 @@ +""" +Connectors can be found in the following doc https://trino.io/docs/current/connector.html + +For the hive connector, all file formats are here +https://trino.io/docs/current/connector/hive.html#supported-file-types + +Example yaml config +```yaml +offline_store: + type: trino + host: localhost + port: 8080 + catalog: hive + dataset: ci + connector: + type: hive + file_format: parquet +``` +""" +from datetime import datetime +from typing import Any, Dict, Iterator, Optional, Set + +import numpy as np +import pandas as pd +import pyarrow +from pytz import utc + +from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino +from feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map import ( + pa_to_trino_value_type, +) + +CONNECTORS_DONT_SUPPORT_CREATE_TABLE: Set[str] = { + "druid", + "elasticsearch", + "googlesheets", + "jmx", + "kafka", + "kinesis", + "localfile", + "pinot", + "postgresql", + "prometheus", + "redis", + "thrift", + "tpcds", + "tpch", +} +CONNECTORS_WITHOUT_WITH_STATEMENTS: Set[str] = { + "bigquery", + "cassandra", + "memory", + "mongodb", + "mysql", + "oracle", + "redshift", + "memsql", +} + +CREATE_SCHEMA_QUERY_TEMPLATE = """ +CREATE TABLE IF NOT EXISTS {table} ( + {schema} +) +{with_statement} +""" + +INSERT_ROWS_QUERY_TEMPLATE = """ +INSERT INTO {table} ({columns}) +VALUES {values} +""" + + +def pyarrow_schema_from_dataframe(df: pd.DataFrame) -> Dict[str, Any]: + pyarrow_schema = pyarrow.Table.from_pandas(df).schema + trino_schema: Dict[str, Any] = {} + for field in pyarrow_schema: + try: + trino_type = pa_to_trino_value_type(str(field.type)) + except KeyError: + raise ValueError( + f"Not supported type '{field.type}' in entity_df for '{field.name}'." 
+ ) + trino_schema[field.name] = trino_type + return trino_schema + + +def trino_table_schema_from_dataframe(df: pd.DataFrame) -> str: + return ",".join( + [ + f"{field_name} {field_type}" + for field_name, field_type in pyarrow_schema_from_dataframe(df=df).items() + ] + ) + + +def pandas_dataframe_fix_batches( + df: pd.DataFrame, batch_size: int +) -> Iterator[pd.DataFrame]: + for pos in range(0, len(df), batch_size): + yield df[pos : pos + batch_size] + + +def format_pandas_row(df: pd.DataFrame) -> str: + pyarrow_schema = pyarrow_schema_from_dataframe(df=df) + + def _is_nan(value: Any) -> bool: + if value is None: + return True + + try: + return np.isnan(value) + except TypeError: + return False + + def _format_value(row: pd.Series, schema: Dict[str, Any]) -> str: + formated_values = [] + for row_name, row_value in row.iteritems(): + if schema[row_name].startswith("timestamp"): + if isinstance(row_value, datetime): + row_value = format_datetime(row_value) + formated_values.append(f"TIMESTAMP '{row_value}'") + elif isinstance(row_value, list): + formated_values.append(f"ARRAY{row_value}") + elif isinstance(row_value, np.ndarray): + formated_values.append(f"ARRAY{row_value.tolist()}") + elif isinstance(row_value, tuple): + formated_values.append(f"ARRAY{list(row_value)}") + elif isinstance(row_value, str): + formated_values.append(f"'{row_value}'") + elif _is_nan(row_value): + formated_values.append("NULL") + else: + formated_values.append(f"{row_value}") + + return f"({','.join(formated_values)})" + + results = df.apply(_format_value, args=(pyarrow_schema,), axis=1).tolist() + return ",".join(results) + + +def format_datetime(t: datetime) -> str: + if t.tzinfo: + t = t.astimezone(tz=utc) + return t.strftime("%Y-%m-%d %H:%M:%S.%f") + + +def upload_pandas_dataframe_to_trino( + client: Trino, + df: pd.DataFrame, + table: str, + connector_args: Optional[Dict[str, str]] = None, + batch_size: int = 1000000, # 1 million rows by default +) -> None: + connector_args = connector_args or {} + connector_name = connector_args["type"] + + if connector_name in CONNECTORS_DONT_SUPPORT_CREATE_TABLE: + raise ValueError( + f"The connector '{connector_name}' is not supported because it does not support CREATE TABLE operations" + ) + elif connector_name in CONNECTORS_WITHOUT_WITH_STATEMENTS: + with_statement = "" + elif connector_name in {"hive", "iceberg"}: + if "file_format" not in connector_args.keys(): + raise ValueError( + f"The connector {connector_name} needs the argument 'file_format' in order to create tables with Trino" + ) + with_statement = f"WITH (format = '{connector_args['file_format']}')" + elif connector_name in {"kudu", "phoenix", "sqlserver"}: + raise ValueError( + f"The connector {connector_name} is not yet supported. 
PRs welcome :)"
+        )
+    else:
+        raise ValueError(
+            f"The connector {connector_name} is not among the connectors listed on the Trino website: https://trino.io/docs/current/connector.html"
+        )
+
+    client.execute_query(
+        CREATE_SCHEMA_QUERY_TEMPLATE.format(
+            table=table,
+            schema=trino_table_schema_from_dataframe(df=df),
+            with_statement=with_statement,
+        )
+    )
+
+    # Upload batches of `batch_size` rows (1 million by default) at a time
+    for batch_df in pandas_dataframe_fix_batches(df=df, batch_size=batch_size):
+        client.execute_query(
+            INSERT_ROWS_QUERY_TEMPLATE.format(
+                table=table,
+                columns=",".join(batch_df.columns),
+                values=format_pandas_row(batch_df),
+            )
+        )
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py
new file mode 100644
index 0000000000..5228ed84a9
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py
@@ -0,0 +1,12 @@
+from tests.integration.feature_repos.integration_test_repo_config import (
+    IntegrationTestRepoConfig,
+)
+from tests.integration.feature_repos.universal.data_sources.trino import (
+    TrinoSourceCreator,
+)
+
+FULL_REPO_CONFIGS = [
+    IntegrationTestRepoConfig(
+        provider="local", offline_store_creator=TrinoSourceCreator,
+    ),
+]
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/properties/memory.properties b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/properties/memory.properties
new file mode 100644
index 0000000000..6a291def3c
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/properties/memory.properties
@@ -0,0 +1,2 @@
+connector.name=memory
+memory.max-data-per-node=128MB
\ No newline at end of file
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py
new file mode 100644
index 0000000000..87a99b820e
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py
@@ -0,0 +1,573 @@
+import uuid
+from datetime import date, datetime
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import numpy as np
+import pandas as pd
+import pyarrow
+from pydantic import StrictStr
+from trino.auth import Authentication
+
+from feast.data_source import DataSource
+from feast.errors import InvalidEntityType
+from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView
+from feast.infra.offline_stores import offline_utils
+from feast.infra.offline_stores.contrib.trino_offline_store.connectors.upload import (
+    upload_pandas_dataframe_to_trino,
+)
+from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino
+from feast.infra.offline_stores.contrib.trino_offline_store.trino_source import (
+    SavedDatasetTrinoStorage,
+    TrinoSource,
+)
+from feast.infra.offline_stores.offline_store import (
+    OfflineStore,
+    RetrievalJob,
+    RetrievalMetadata,
+)
+from feast.on_demand_feature_view import OnDemandFeatureView
+from feast.registry import Registry
+from feast.repo_config import FeastConfigBaseModel, RepoConfig
+from feast.saved_dataset import SavedDatasetStorage
+from feast.usage import log_exceptions_and_usage
+
+
+class TrinoOfflineStoreConfig(FeastConfigBaseModel):
+    """Offline store config for Trino"""
+
+    type: StrictStr = "trino"
+    """ Offline store type selector """
+
+    host: StrictStr
+    """ Host of the Trino cluster """
+
+    port: int
+    """ Port of the Trino cluster """
+
+    catalog: StrictStr
+    """ Catalog of the Trino cluster """
+
+    connector: Dict[str, str]
+    """
+    Trino connector to use as well as potential extra parameters.
+    Needs to contain at least the connector type, for example
+    {"type": "bigquery"}
+    or
+    {"type": "hive", "file_format": "parquet"}
+    """
+
+    dataset: StrictStr = "feast"
+    """ (optional) Trino Dataset name for temporary tables """
+
+
+class TrinoRetrievalJob(RetrievalJob):
+    def __init__(
+        self,
+        query: str,
+        client: Trino,
+        config: RepoConfig,
+        full_feature_names: bool,
+        on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None,
+        metadata: Optional[RetrievalMetadata] = None,
+    ):
+        self._query = query
+        self._client = client
+        self._config = config
+        self._full_feature_names = full_feature_names
+        self._on_demand_feature_views = on_demand_feature_views
+        self._metadata = metadata
+
+    @property
+    def full_feature_names(self) -> bool:
+        return self._full_feature_names
+
+    @property
+    def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]:
+        return self._on_demand_feature_views
+
+    def _to_df_internal(self) -> pd.DataFrame:
+        """Return dataset as Pandas DataFrame synchronously, including on demand transforms"""
+        results = self._client.execute_query(query_text=self._query)
+        self.pyarrow_schema = results.pyarrow_schema
+        return results.to_dataframe()
+
+    def _to_arrow_internal(self) -> pyarrow.Table:
+        """Return dataset as a pyarrow Table synchronously, including on demand transforms"""
+        return pyarrow.Table.from_pandas(
+            self._to_df_internal(), schema=self.pyarrow_schema
+        )
+
+    def to_sql(self) -> str:
+        """Returns the SQL query that will be executed in Trino to build the historical feature table"""
+        return self._query
+
+    def to_trino(
+        self,
+        destination_table: Optional[str] = None,
+        timeout: int = 1800,
+        retry_cadence: int = 10,
+    ) -> Optional[str]:
+        """
+        Triggers the execution of a historical feature retrieval query and exports the results to a Trino table.
+        Runs for a maximum amount of time specified by the timeout parameter (defaulting to 30 minutes).
+        Args:
+            destination_table: An optional table reference to write to; a name is generated when omitted.
+            timeout: An optional number of seconds for setting the time limit of the query.
+            retry_cadence: An optional number of seconds for setting how often the job should be checked for completion.
+        Returns:
+            Returns the destination table name.
+        """
+        if not destination_table:
+            today = date.today().strftime("%Y%m%d")
+            rand_id = str(uuid.uuid4())[:7]
+            destination_table = f"{self._client.catalog}.{self._config.offline_store.dataset}.historical_{today}_{rand_id}"
+
+        # TODO: Implement the timeout logic
+        query = f"CREATE TABLE {destination_table} AS ({self._query})"
+        self._client.execute_query(query_text=query)
+        return destination_table
+
+    def persist(self, storage: SavedDatasetStorage):
+        """
+        Run the retrieval and persist the results in the same offline store used for read.
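+        For Trino this amounts to materializing the retrieval query into the table
+        referenced by the given `SavedDatasetTrinoStorage`, via `to_trino`.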
+ """ + if not isinstance(storage, SavedDatasetTrinoStorage): + raise ValueError( + f"The storage object is not a `SavedDatasetTrinoStorage` but is instead a {type(storage)}" + ) + self.to_trino(destination_table=storage.trino_options.table) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + """ + Return metadata information about retrieval. + Should be available even before materializing the dataset itself. + """ + return self._metadata + + +class TrinoOfflineStore(OfflineStore): + @staticmethod + @log_exceptions_and_usage(offline_store="trino") + def pull_latest_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + created_timestamp_column: Optional[str], + start_date: datetime, + end_date: datetime, + user: str = "user", + auth: Optional[Authentication] = None, + http_scheme: Optional[str] = None, + ) -> TrinoRetrievalJob: + if not isinstance(data_source, TrinoSource): + raise ValueError( + f"The data_source object is not a TrinoSource but is instead '{type(data_source)}'" + ) + if not isinstance(config.offline_store, TrinoOfflineStoreConfig): + raise ValueError( + f"The config.offline_store object is not a TrinoOfflineStoreConfig but is instead '{type(config.offline_store)}'" + ) + + from_expression = data_source.get_table_query_string() + + partition_by_join_key_string = ", ".join(join_key_columns) + if partition_by_join_key_string != "": + partition_by_join_key_string = ( + "PARTITION BY " + partition_by_join_key_string + ) + timestamps = [timestamp_field] + if created_timestamp_column: + timestamps.append(created_timestamp_column) + timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" + field_string = ", ".join(join_key_columns + feature_name_columns + timestamps) + + client = _get_trino_client( + config=config, user=user, auth=auth, http_scheme=http_scheme + ) + + query = f""" + SELECT + {field_string} + {f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""} + FROM ( + SELECT {field_string}, + ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row + FROM {from_expression} + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + ) + WHERE _feast_row = 1 + """ + + # When materializing a single feature view, we don't need full feature names. On demand transforms aren't materialized + return TrinoRetrievalJob( + query=query, client=client, config=config, full_feature_names=False, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="trino") + def get_historical_features( + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pd.DataFrame, str], + registry: Registry, + project: str, + full_feature_names: bool = False, + user: str = "user", + auth: Optional[Authentication] = None, + http_scheme: Optional[str] = None, + ) -> TrinoRetrievalJob: + if not isinstance(config.offline_store, TrinoOfflineStoreConfig): + raise ValueError( + f"This function should be used with a TrinoOfflineStoreConfig object. 
Instead we have config.offline_store being '{type(config.offline_store)}'" + ) + + client = _get_trino_client( + config=config, user=user, auth=auth, http_scheme=http_scheme + ) + + table_reference = _get_table_reference_for_new_entity( + catalog=config.offline_store.catalog, + dataset_name=config.offline_store.dataset, + ) + + entity_schema = _upload_entity_df_and_get_entity_schema( + client=client, + table_name=table_reference, + entity_df=entity_df, + connector=config.offline_store.connector, + ) + + entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema=entity_schema + ) + + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df=entity_df, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + client=client, + ) + + expected_join_keys = offline_utils.get_expected_join_keys( + project=project, feature_views=feature_views, registry=registry + ) + + offline_utils.assert_expected_columns_in_entity_df( + entity_schema=entity_schema, + join_keys=expected_join_keys, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + ) + + # Build a query context containing all information required to template the Trino SQL query + query_context = offline_utils.get_feature_view_query_context( + feature_refs=feature_refs, + feature_views=feature_views, + registry=registry, + project=project, + entity_df_timestamp_range=entity_df_event_timestamp_range, + ) + + # Generate the Trino SQL query from the query context + query = offline_utils.build_point_in_time_query( + query_context, + left_table_query_string=table_reference, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + + return TrinoRetrievalJob( + query=query, + client=client, + config=config, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(set(entity_schema.keys()) - {entity_df_event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="trino") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + user: str = "user", + auth: Optional[Authentication] = None, + http_scheme: Optional[str] = None, + ) -> RetrievalJob: + if not isinstance(data_source, TrinoSource): + raise ValueError( + f"The data_source object is not a TrinoSource object but is instead a {type(data_source)}" + ) + from_expression = data_source.get_table_query_string() + + client = _get_trino_client( + config=config, user=user, auth=auth, http_scheme=http_scheme + ) + field_string = ", ".join( + join_key_columns + feature_name_columns + [timestamp_field] + ) + query = f""" + SELECT {field_string} + FROM {from_expression} + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + """ + return TrinoRetrievalJob( + query=query, client=client, config=config, full_feature_names=False, + ) + + +def _get_table_reference_for_new_entity(catalog: str, dataset_name: str,) -> str: + """Gets the table_id for the new entity to be uploaded.""" + 
table_name = offline_utils.get_temp_entity_table_name() + return f"{catalog}.{dataset_name}.{table_name}" + + +def _upload_entity_df_and_get_entity_schema( + client: Trino, + table_name: str, + entity_df: Union[pd.DataFrame, str], + connector: Dict[str, str], +) -> Dict[str, np.dtype]: + """Uploads a Pandas entity dataframe into a Trino table and returns the resulting table""" + if type(entity_df) is str: + client.execute_query(f"CREATE TABLE {table_name} AS ({entity_df})") + + results = client.execute_query(f"SELECT * FROM {table_name} LIMIT 1") + + limited_entity_df = pd.DataFrame( + data=results.data, columns=results.columns_names + ) + for col_name, col_type in results.schema.items(): + if col_type == "timestamp": + limited_entity_df[col_name] = pd.to_datetime( + limited_entity_df[col_name] + ) + entity_schema = dict(zip(limited_entity_df.columns, limited_entity_df.dtypes)) + + return entity_schema + elif isinstance(entity_df, pd.DataFrame): + upload_pandas_dataframe_to_trino( + client=client, df=entity_df, table=table_name, connector_args=connector + ) + entity_schema = dict(zip(entity_df.columns, entity_df.dtypes)) + return entity_schema + else: + raise InvalidEntityType(type(entity_df)) + + # TODO: Ensure that the table expires after some time + + +def _get_trino_client( + config: RepoConfig, user: str, auth: Optional[Any], http_scheme: Optional[str] +) -> Trino: + client = Trino( + user=user, + catalog=config.offline_store.catalog, + host=config.offline_store.host, + port=config.offline_store.port, + auth=auth, + http_scheme=http_scheme, + ) + return client + + +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, + client: Trino, +) -> Tuple[datetime, datetime]: + if type(entity_df) is str: + results = client.execute_query( + f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max " + f"FROM ({entity_df})" + ) + + entity_df_event_timestamp_range = ( + pd.to_datetime(results.data[0][0]).to_pydatetime(), + pd.to_datetime(results.data[0][1]).to_pydatetime(), + ) + elif isinstance(entity_df, pd.DataFrame): + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), + ) + else: + raise InvalidEntityType(type(entity_df)) + + return entity_df_event_timestamp_range + + +MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ +/* + Compute a deterministic hash for the `left_table_query_string` that will be used throughout + all the logic as the field to GROUP BY the data +*/ +WITH entity_dataframe AS ( + SELECT *, + {{entity_df_event_timestamp_col}} AS entity_timestamp + {% for featureview in featureviews %} + {% if featureview.entities %} + ,CONCAT( + {% for entity in featureview.entities %} + CAST({{entity}} AS VARCHAR), + {% endfor %} + CAST({{entity_df_event_timestamp_col}} AS VARCHAR) + ) AS {{featureview.name}}__entity_row_unique_id + {% else %} + ,CAST({{entity_df_event_timestamp_col}} AS VARCHAR) AS {{featureview.name}}__entity_row_unique_id + {% endif %} + {% endfor %} + FROM {{ left_table_query_string }} +), +{% for featureview in featureviews %} +{{ featureview.name }}__entity_dataframe AS ( + SELECT + {{ featureview.entities | join(', ')}}{% if 
featureview.entities %},{% else %}{% endif %}
+        entity_timestamp,
+        {{featureview.name}}__entity_row_unique_id
+    FROM entity_dataframe
+    GROUP BY
+        {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
+        entity_timestamp,
+        {{featureview.name}}__entity_row_unique_id
+),
+/*
+ This query template performs the point-in-time correctness join for a single feature view
+ to the provided entity table.
+ 1. We first join the current feature_view to the entity dataframe that has been passed.
+ This JOIN has the following logic:
+    - For each row of the entity dataframe, only keep the rows where the `timestamp_field`
+    is no later than the one provided in the entity dataframe
+    - If there is a TTL for the current feature_view, also keep the rows where the `timestamp_field`
+    is no earlier than the one provided minus the TTL
+    - For each row, join on the entity key and retrieve the `entity_row_unique_id` that has been
+    computed previously
+ The output of this CTE will contain all the necessary information and will already have filtered
+ out most of the data that is not relevant.
+*/
+{{ featureview.name }}__subquery AS (
+    SELECT
+        {{ featureview.timestamp_field }} as event_timestamp,
+        {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }}
+        {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %}
+        {% for feature in featureview.features %}
+            {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %}
+        {% endfor %}
+    FROM {{ featureview.table_subquery }}
+    WHERE {{ featureview.timestamp_field }} <= from_iso8601_timestamp('{{ featureview.max_event_timestamp }}')
+    {% if featureview.ttl == 0 %}{% else %}
+    AND {{ featureview.timestamp_field }} >= from_iso8601_timestamp('{{ featureview.min_event_timestamp }}')
+    {% endif %}
+),
+{{ featureview.name }}__base AS (
+    SELECT
+        subquery.*,
+        entity_dataframe.entity_timestamp,
+        entity_dataframe.{{featureview.name}}__entity_row_unique_id
+    FROM {{ featureview.name }}__subquery AS subquery
+    INNER JOIN {{ featureview.name }}__entity_dataframe AS entity_dataframe
+    ON TRUE
+        AND subquery.event_timestamp <= entity_dataframe.entity_timestamp
+        {% if featureview.ttl == 0 %}{% else %}
+        AND subquery.event_timestamp >= entity_dataframe.entity_timestamp - interval '{{ featureview.ttl }}' second
+        {% endif %}
+        {% for entity in featureview.entities %}
+        AND subquery.{{ entity }} = entity_dataframe.{{ entity }}
+        {% endfor %}
+),
+/*
+ 2. If the `created_timestamp_column` has been set, we need to
+ deduplicate the data first. This is done by calculating the
+ `MAX(created_timestamp)` for each event_timestamp.
+ We then join the data on the next CTE
+*/
+{% if featureview.created_timestamp_column %}
+{{ featureview.name }}__dedup AS (
+    SELECT
+        {{featureview.name}}__entity_row_unique_id,
+        event_timestamp,
+        MAX(created_timestamp) as created_timestamp
+    FROM {{ featureview.name }}__base
+    GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp
+),
+{% endif %}
+/*
+ 3. The data has been filtered during the first CTE "*__base".
+ Thus we only need to compute the latest timestamp of each feature.
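+ This is done with a ROW_NUMBER() window partitioned by the entity row id and
+ ordered by event_timestamp (and created_timestamp, when present) descending,
+ keeping only the first row per partition.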
+*/ +{{ featureview.name }}__latest AS ( + SELECT + event_timestamp, + {% if featureview.created_timestamp_column %}created_timestamp,{% endif %} + {{featureview.name}}__entity_row_unique_id + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY {{featureview.name}}__entity_row_unique_id + ORDER BY event_timestamp DESC{% if featureview.created_timestamp_column %},created_timestamp DESC{% endif %} + ) AS row_number + FROM {{ featureview.name }}__base + {% if featureview.created_timestamp_column %} + INNER JOIN {{ featureview.name }}__dedup + USING ({{featureview.name}}__entity_row_unique_id, event_timestamp, created_timestamp) + {% endif %} + ) + WHERE row_number = 1 +), +/* + 4. Once we know the latest value of each feature for a given timestamp, + we can join again the data back to the original "base" dataset +*/ +{{ featureview.name }}__cleaned AS ( + SELECT base.*, {{featureview.name}}__entity_row_unique_id + FROM {{ featureview.name }}__base as base + INNER JOIN {{ featureview.name }}__latest + USING( + {{featureview.name}}__entity_row_unique_id, + event_timestamp + {% if featureview.created_timestamp_column %} + ,created_timestamp + {% endif %} + ) +){% if loop.last %}{% else %}, {% endif %} +{% endfor %} +/* + Joins the outputs of multiple time travel joins to a single table. + The entity_dataframe dataset being our source of truth here. + */ +SELECT {{ final_output_feature_names | join(', ')}} +FROM entity_dataframe +{% for featureview in featureviews %} +LEFT JOIN ( + SELECT + {{featureview.name}}__entity_row_unique_id + {% for feature in featureview.features %} + ,{% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %} + {% endfor %} + FROM {{ featureview.name }}__cleaned +) USING ({{featureview.name}}__entity_row_unique_id) +{% endfor %} +""" diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py new file mode 100644 index 0000000000..1d4b588124 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +import datetime +import os +import signal +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Optional + +import numpy as np +import pandas as pd +import pyarrow as pa +import trino +from trino.dbapi import Cursor +from trino.exceptions import TrinoQueryError + +from feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map import ( + trino_to_pa_value_type, +) + + +class QueryStatus(Enum): + PENDING = 0 + RUNNING = 1 + ERROR = 2 + COMPLETED = 3 + CANCELLED = 4 + + +class Trino: + def __init__( + self, + host: Optional[str] = None, + port: Optional[int] = None, + user: Optional[str] = None, + catalog: Optional[str] = None, + auth: Optional[Any] = None, + http_scheme: Optional[str] = None, + ): + self.host = host or os.getenv("TRINO_HOST") + self.port = port or os.getenv("TRINO_PORT") + self.user = user or os.getenv("TRINO_USER") + self.catalog = catalog or os.getenv("TRINO_CATALOG") + self.auth = auth or os.getenv("TRINO_AUTH") + self.http_scheme = http_scheme or os.getenv("TRINO_HTTP_SCHEME") + self._cursor: Optional[Cursor] = None + + if self.host is None: + raise ValueError("TRINO_HOST must be set if not passed in") + if self.port is None: + raise ValueError("TRINO_PORT must be 
set if not passed in") + if self.user is None: + raise ValueError("TRINO_USER must be set if not passed in") + if self.catalog is None: + raise ValueError("TRINO_CATALOG must be set if not passed in") + + def _get_cursor(self) -> Cursor: + if self._cursor is None: + self._cursor = trino.dbapi.connect( + host=self.host, + port=self.port, + user=self.user, + catalog=self.catalog, + auth=self.auth, + http_scheme=self.http_scheme, + ).cursor() + + return self._cursor + + def create_query(self, query_text: str) -> Query: + """ + Create a Query object without executing it. + """ + return Query(query_text=query_text, cursor=self._get_cursor()) + + def execute_query(self, query_text: str) -> Results: + """ + Create a Query object and execute it. + """ + query = Query(query_text=query_text, cursor=self._get_cursor()) + return query.execute() + + +class Query(object): + def __init__(self, query_text: str, cursor: Cursor): + self.query_text = query_text + self.status = QueryStatus.PENDING + self._cursor = cursor + + signal.signal(signal.SIGINT, self.cancel) + signal.signal(signal.SIGTERM, self.cancel) + + def execute(self) -> Results: + try: + self.status = QueryStatus.RUNNING + start_time = datetime.datetime.utcnow() + + self._cursor.execute(operation=self.query_text) + rows = self._cursor.fetchall() + + end_time = datetime.datetime.utcnow() + self.execution_time = end_time - start_time + self.status = QueryStatus.COMPLETED + + return Results(data=rows, columns=self._cursor._query.columns) + except TrinoQueryError as error: + self.status = QueryStatus.ERROR + raise error + finally: + self.close() + + def close(self): + self._cursor.close() + + def cancel(self, *args): + if self.status != QueryStatus.COMPLETED: + self._cursor.cancel() + self.status = QueryStatus.CANCELLED + self.close() + + +@dataclass +class Results: + """Class for keeping track of the results of a Trino query""" + + data: List[List[Any]] + columns: List[Dict] + + @property + def columns_names(self) -> List[str]: + return [column["name"] for column in self.columns] + + @property + def schema(self) -> Dict[str, str]: + return {column["name"]: column["type"] for column in self.columns} + + @property + def pyarrow_schema(self) -> pa.Schema: + return pa.schema( + [ + pa.field(column["name"], trino_to_pa_value_type(column["type"])) + for column in self.columns + ] + ) + + def to_dataframe(self) -> pd.DataFrame: + df = pd.DataFrame(data=self.data, columns=self.columns_names) + for col_name, col_type in self.schema.items(): + if col_type.startswith("timestamp"): + df[col_name] = pd.to_datetime(df[col_name]) + return df.fillna(np.nan) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py new file mode 100644 index 0000000000..b559d0e59e --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py @@ -0,0 +1,241 @@ +from typing import Callable, Dict, Iterable, Optional, Tuple + +from feast import ValueType +from feast.data_source import DataSource +from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino +from feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map import ( + trino_to_feast_value_type, +) +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) +from feast.repo_config import RepoConfig 
+from feast.saved_dataset import SavedDatasetStorage + + +class TrinoOptions: + """ + DataSource Trino options used to source features from Trino query + """ + + def __init__(self, table: Optional[str], query: Optional[str]): + self._table = table + self._query = query + + @property + def query(self): + """ + Returns the Trino SQL query referenced by this source + """ + return self._query + + @query.setter + def query(self, query): + """ + Sets the Trino SQL query referenced by this source + """ + self._query = query + + @property + def table(self): + """ + Returns the table ref of this Trino table + """ + return self._table + + @table.setter + def table(self, table): + """ + Sets the table ref of this Trino table + """ + self._table = table + + @classmethod + def from_proto(cls, trino_options_proto: DataSourceProto.TrinoOptions): + """ + Creates a TrinoOptions from a protobuf representation of a Trino option + Args: + trino_options_proto: A protobuf representation of a DataSource + Returns: + Returns a TrinoOptions object based on the trino_options protobuf + """ + trino_options = cls( + table=trino_options_proto.table, query=trino_options_proto.query, + ) + + return trino_options + + def to_proto(self) -> DataSourceProto.TrinoOptions: + """ + Converts an TrinoOptionsProto object to its protobuf representation. + Returns: + TrinoOptionsProto protobuf + """ + + trino_options_proto = DataSourceProto.TrinoOptions( + table=self.table, query=self.query, + ) + + return trino_options_proto + + +class TrinoSource(DataSource): + def __init__( + self, + *, + event_timestamp_column: Optional[str] = "", + table: Optional[str] = None, + created_timestamp_column: Optional[str] = "", + field_mapping: Optional[Dict[str, str]] = None, + query: Optional[str] = None, + name: Optional[str] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = None, + ): + super().__init__( + name=name if name else "", + event_timestamp_column=event_timestamp_column, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping, + description=description, + tags=tags, + owner=owner, + timestamp_field=timestamp_field, + ) + + self._trino_options = TrinoOptions(table=table, query=query) + + def __hash__(self): + return super().__hash__() + + def __eq__(self, other): + if not isinstance(other, TrinoSource): + raise TypeError( + "Comparisons should only involve TrinoSource class objects." 
+ ) + + return ( + self.name == other.name + and self.trino_options.table == other.trino_options.table + and self.trino_options.query == other.trino_options.query + and self.timestamp_field == other.timestamp_field + and self.created_timestamp_column == other.created_timestamp_column + and self.field_mapping == other.field_mapping + and self.description == other.description + and self.tags == other.tags + and self.owner == other.owner + ) + + @property + def table(self): + return self._trino_options.table + + @property + def query(self): + return self._trino_options.query + + @property + def trino_options(self): + """ + Returns the Trino options of this data source + """ + return self._trino_options + + @trino_options.setter + def trino_options(self, trino_options): + """ + Sets the Trino options of this data source + """ + self._trino_options = trino_options + + @staticmethod + def from_proto(data_source: DataSourceProto): + + assert data_source.HasField("trino_options") + + return TrinoSource( + name=data_source.name, + field_mapping=dict(data_source.field_mapping), + table=data_source.trino_options.table, + query=data_source.trino_options.query, + timestamp_field=data_source.timestamp_field, + created_timestamp_column=data_source.created_timestamp_column, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, + ) + + def to_proto(self) -> DataSourceProto: + data_source_proto = DataSourceProto( + name=self.name, + type=DataSourceProto.BATCH_TRINO, + field_mapping=self.field_mapping, + trino_options=self.trino_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, + ) + + data_source_proto.timestamp_field = self.timestamp_field + data_source_proto.created_timestamp_column = self.created_timestamp_column + data_source_proto.date_partition_column = self.date_partition_column + + return data_source_proto + + def validate(self, config: RepoConfig): + self.get_table_column_names_and_types(config) + + def get_table_query_string(self) -> str: + """Returns a string that can directly be used to reference this table in SQL""" + return self.table or self.query + + @staticmethod + def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: + return trino_to_feast_value_type + + def get_table_column_names_and_types( + self, config: RepoConfig + ) -> Iterable[Tuple[str, str]]: + client = Trino( + user="user", + catalog=config.offline_store.catalog, + host=config.offline_store.host, + port=config.offline_store.port, + ) + if self.table: + table_schema = client.execute_query( + f"SELECT * FROM {self.table} LIMIT 1" + ).schema + else: + table_schema = client.execute_query( + f"SELECT * FROM ({self.query}) LIMIT 1" + ).schema + + return [ + (field_name, field_type) for field_name, field_type in table_schema.items() + ] + + +class SavedDatasetTrinoStorage(SavedDatasetStorage): + _proto_attr_name = "trino_storage" + + trino_options: TrinoOptions + + def __init__(self, table: Optional[str] = None, query: Optional[str] = None): + self.trino_options = TrinoOptions(table=table, query=query) + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: + # TODO: implementation is not correct. Needs fix and update to protos. 
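+        # Note that only the table reference is restored here; a query stored in
+        # the proto is currently dropped on the way back.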
+        return SavedDatasetTrinoStorage(
+            table=TrinoOptions.from_proto(storage_proto.trino_storage).table
+        )
+
+    def to_proto(self) -> SavedDatasetStorageProto:
+        return SavedDatasetStorageProto(trino_storage=self.trino_options.to_proto())
+
+    def to_data_source(self) -> DataSource:
+        return TrinoSource(table=self.trino_options.table)
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py
new file mode 100644
index 0000000000..79066bbfa4
--- /dev/null
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py
@@ -0,0 +1,111 @@
+from typing import Dict
+
+import pyarrow as pa
+import regex as re
+
+from feast import ValueType
+
+
+def trino_to_feast_value_type(trino_type_as_str: str) -> ValueType:
+    type_map: Dict[str, ValueType] = {
+        "tinyint": ValueType.INT32,
+        "smallint": ValueType.INT32,
+        "int": ValueType.INT32,
+        "integer": ValueType.INT32,
+        "bigint": ValueType.INT64,
+        "double": ValueType.DOUBLE,
+        "decimal": ValueType.FLOAT,
+        "timestamp": ValueType.UNIX_TIMESTAMP,
+        "char": ValueType.STRING,
+        "varchar": ValueType.STRING,
+        "boolean": ValueType.BOOL,
+    }
+    return type_map[trino_type_as_str.lower()]
+
+
+def pa_to_trino_value_type(pa_type_as_str: str) -> str:
+    # PyArrow types: https://arrow.apache.org/docs/python/api/datatypes.html
+    # Trino types: https://trino.io/docs/current/language/types.html
+    pa_type_as_str = pa_type_as_str.lower()
+    trino_type = "{}"
+    if pa_type_as_str.startswith("list"):
+        # PyArrow renders list types as e.g. "list<item: int64>"; extract the
+        # element type and wrap it in Trino's array(...) syntax.
+        trino_type = "array({})"
+        pa_type_as_str = re.search(r"^list<item:\s*(.+)>$", pa_type_as_str).group(1)
+
+    if pa_type_as_str.startswith("date"):
+        return trino_type.format("date")
+
+    if pa_type_as_str.startswith("timestamp"):
+        if "tz=" in pa_type_as_str:
+            return trino_type.format("timestamp with time zone")
+        else:
+            return trino_type.format("timestamp")
+
+    if pa_type_as_str.startswith("decimal"):
+        return trino_type.format(pa_type_as_str)
+
+    type_map = {
+        "null": "null",
+        "bool": "boolean",
+        "int8": "tinyint",
+        "int16": "smallint",
+        "int32": "int",
+        "int64": "bigint",
+        "uint8": "smallint",
+        "uint16": "int",
+        "uint32": "bigint",
+        "uint64": "bigint",
+        "float": "double",
+        "double": "double",
+        "binary": "binary",
+        "string": "varchar",
+    }
+    return trino_type.format(type_map[pa_type_as_str])
+
+
+_TRINO_TO_PA_TYPE_MAP = {
+    "null": pa.null(),
+    "boolean": pa.bool_(),
+    "date": pa.date32(),
+    "tinyint": pa.int8(),
+    "smallint": pa.int16(),
+    "integer": pa.int32(),
+    "bigint": pa.int64(),
+    "double": pa.float64(),
+    "binary": pa.binary(),
+    "char": pa.string(),
+}
+
+
+def trino_to_pa_value_type(trino_type_as_str: str) -> pa.DataType:
+    trino_type_as_str = trino_type_as_str.lower()
+
+    _is_list: bool = False
+    if trino_type_as_str.startswith("array"):
+        _is_list = True
+        trino_type_as_str = re.search(r"^array\((\w+)\)$", trino_type_as_str).group(1)
+
+    if trino_type_as_str.startswith("decimal"):
+        search_precision = re.search(
+            r"^decimal\((\d+)(?>,\s?\d+)?\)$", trino_type_as_str
+        )
+        if search_precision:
+            precision = int(search_precision.group(1))
+            if precision > 32:
+                pa_type = pa.float64()
+            else:
+                pa_type = pa.float32()
+        else:
+            raise ValueError(f"Unable to parse precision out of '{trino_type_as_str}'")
+
+    elif trino_type_as_str.startswith("timestamp"):
+        pa_type = pa.timestamp("us")
+
+    elif trino_type_as_str.startswith("varchar"):
+        pa_type = pa.string()
+
+    else:
+        pa_type = _TRINO_TO_PA_TYPE_MAP[trino_type_as_str]
+
+    if _is_list:
+        return pa.list_(pa_type)
+    else:
+        return pa_type
diff --git 
a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index 74153acaee..052d546748 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -1,52 +1,104 @@ from datetime import datetime -from typing import Callable, List, Optional, Union +from typing import Callable, List, Optional, Tuple, Union +import dask.dataframe as dd import pandas as pd import pyarrow import pytz from pydantic.typing import Literal -from feast import FileSource +from feast import FileSource, OnDemandFeatureView from feast.data_source import DataSource from feast.errors import FeastJoinKeysDuringMaterialization -from feast.feature_view import FeatureView -from feast.infra.offline_stores.offline_store import OfflineStore, RetrievalJob -from feast.infra.provider import ( +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView +from feast.infra.offline_stores.file_source import SavedDatasetFileStorage +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.infra.offline_stores.offline_utils import ( DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, +) +from feast.infra.provider import ( _get_requested_feature_views_to_features_dict, - _run_field_mapping, + _run_dask_field_mapping, ) from feast.registry import Registry from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.usage import log_exceptions_and_usage class FileOfflineStoreConfig(FeastConfigBaseModel): - """ Offline store config for local (file-based) store """ + """Offline store config for local (file-based) store""" type: Literal["file"] = "file" """ Offline store type selector""" class FileRetrievalJob(RetrievalJob): - def __init__(self, evaluation_function: Callable): + def __init__( + self, + evaluation_function: Callable, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + metadata: Optional[RetrievalMetadata] = None, + ): """Initialize a lazy historical retrieval job""" # The evaluation function executes a stored procedure to compute a historical retrieval. self.evaluation_function = evaluation_function + self._full_feature_names = full_feature_names + self._on_demand_feature_views = ( + on_demand_feature_views if on_demand_feature_views else [] + ) + self._metadata = metadata + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views - def to_df(self): + @log_exceptions_and_usage + def _to_df_internal(self) -> pd.DataFrame: # Only execute the evaluation function to build the final historical retrieval dataframe at the last moment. - df = self.evaluation_function() + df = self.evaluation_function().compute() return df - def to_arrow(self): + @log_exceptions_and_usage + def _to_arrow_internal(self): # Only execute the evaluation function to build the final historical retrieval dataframe at the last moment. 
- df = self.evaluation_function() + df = self.evaluation_function().compute() return pyarrow.Table.from_pandas(df) + def persist(self, storage: SavedDatasetStorage): + assert isinstance(storage, SavedDatasetFileStorage) + filesystem, path = FileSource.create_filesystem_and_path( + storage.file_options.uri, storage.file_options.s3_endpoint_override, + ) + + if path.endswith(".parquet"): + pyarrow.parquet.write_table( + self.to_arrow(), where=path, filesystem=filesystem + ) + else: + # otherwise assume destination is directory + pyarrow.parquet.write_to_dataset( + self.to_arrow(), root_path=path, filesystem=filesystem + ) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata + class FileOfflineStore(OfflineStore): @staticmethod + @log_exceptions_and_usage(offline_store="file") def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], @@ -56,7 +108,9 @@ def get_historical_features( project: str, full_feature_names: bool = False, ) -> RetrievalJob: - if not isinstance(entity_df, pd.DataFrame): + if not isinstance(entity_df, pd.DataFrame) and not isinstance( + entity_df, dd.DataFrame + ): raise ValueError( f"Please provide an entity_df of type {type(pd.DataFrame)} instead of type {type(entity_df)}" ) @@ -74,150 +128,152 @@ def get_historical_features( raise ValueError( f"Please provide an entity_df with a column named {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL} representing the time of events." ) - feature_views_to_features = _get_requested_feature_views_to_features_dict( - feature_refs, feature_views + ( + feature_views_to_features, + on_demand_feature_views_to_features, + ) = _get_requested_feature_views_to_features_dict( + feature_refs, + feature_views, + registry.list_on_demand_feature_views(config.project), + ) + + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, entity_df_event_timestamp_col ) # Create lazy function that is only called from the RetrievalJob object def evaluate_historical_retrieval(): - # Make sure all event timestamp fields are tz-aware. We default tz-naive fields to UTC - entity_df[entity_df_event_timestamp_col] = entity_df[ - entity_df_event_timestamp_col - ].apply(lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc)) - # Create a copy of entity_df to prevent modifying the original entity_df_with_features = entity_df.copy() - # Convert event timestamp column to datetime and normalize time zone to UTC - # This is necessary to avoid issues with pd.merge_asof - entity_df_with_features[entity_df_event_timestamp_col] = pd.to_datetime( - entity_df_with_features[entity_df_event_timestamp_col], utc=True - ) + entity_df_event_timestamp_col_type = entity_df_with_features.dtypes[ + entity_df_event_timestamp_col + ] + if ( + not hasattr(entity_df_event_timestamp_col_type, "tz") + or entity_df_event_timestamp_col_type.tz != pytz.UTC + ): + # Make sure all event timestamp fields are tz-aware. 
We default tz-naive fields to UTC + entity_df_with_features[ + entity_df_event_timestamp_col + ] = entity_df_with_features[entity_df_event_timestamp_col].apply( + lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc) + ) + + # Convert event timestamp column to datetime and normalize time zone to UTC + # This is necessary to avoid issues with pd.merge_asof + if isinstance(entity_df_with_features, dd.DataFrame): + entity_df_with_features[ + entity_df_event_timestamp_col + ] = dd.to_datetime( + entity_df_with_features[entity_df_event_timestamp_col], utc=True + ) + else: + entity_df_with_features[ + entity_df_event_timestamp_col + ] = pd.to_datetime( + entity_df_with_features[entity_df_event_timestamp_col], utc=True + ) # Sort event timestamp values entity_df_with_features = entity_df_with_features.sort_values( entity_df_event_timestamp_col ) + join_keys = [] + all_join_keys = [] + # Load feature view data from sources and join them incrementally for feature_view, features in feature_views_to_features.items(): - event_timestamp_column = feature_view.input.event_timestamp_column - created_timestamp_column = feature_view.input.created_timestamp_column - - # Read offline parquet data in pyarrow format - table = pyarrow.parquet.read_table(feature_view.input.path) - - # Rename columns by the field mapping dictionary if it exists - if feature_view.input.field_mapping is not None: - table = _run_field_mapping(table, feature_view.input.field_mapping) - - # Convert pyarrow table to pandas dataframe - df_to_join = table.to_pandas() - - # Make sure all timestamp fields are tz-aware. We default tz-naive fields to UTC - df_to_join[event_timestamp_column] = df_to_join[ - event_timestamp_column - ].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc) + timestamp_field = feature_view.batch_source.timestamp_field + created_timestamp_column = ( + feature_view.batch_source.created_timestamp_column ) - if created_timestamp_column: - df_to_join[created_timestamp_column] = df_to_join[ - created_timestamp_column - ].apply( - lambda x: x - if x.tzinfo is not None - else x.replace(tzinfo=pytz.utc) - ) - - # Sort dataframe by the event timestamp column - df_to_join = df_to_join.sort_values(event_timestamp_column) - - # Build a list of all the features we should select from this source - feature_names = [] - for feature in features: - # Modify the separator for feature refs in column names to double underscore. 
We are using - # double underscore as separator for consistency with other databases like BigQuery, - # where there are very few characters available for use as separators - if full_feature_names: - formatted_feature_name = f"{feature_view.name}__{feature}" - else: - formatted_feature_name = feature - # Add the feature name to the list of columns - feature_names.append(formatted_feature_name) - - # Ensure that the source dataframe feature column includes the feature view name as a prefix - df_to_join.rename( - columns={feature: formatted_feature_name}, inplace=True, - ) # Build a list of entity columns to join on (from the right table) join_keys = [] + for entity_name in feature_view.entities: entity = registry.get_entity(entity_name, project) - join_keys.append(entity.join_key) - right_entity_columns = join_keys + join_key = feature_view.projection.join_key_map.get( + entity.join_key, entity.join_key + ) + join_keys.append(join_key) + right_entity_key_columns = [ - event_timestamp_column - ] + right_entity_columns + timestamp_field, + created_timestamp_column, + ] + join_keys + right_entity_key_columns = [c for c in right_entity_key_columns if c] - # Remove all duplicate entity keys (using created timestamp) - right_entity_key_sort_columns = right_entity_key_columns - if created_timestamp_column: - # If created_timestamp is available, use it to dedupe deterministically - right_entity_key_sort_columns = right_entity_key_sort_columns + [ - created_timestamp_column - ] - - df_to_join.sort_values(by=right_entity_key_sort_columns, inplace=True) - df_to_join.drop_duplicates( - right_entity_key_sort_columns, - keep="last", - ignore_index=True, - inplace=True, + all_join_keys = list(set(all_join_keys + join_keys)) + + df_to_join = _read_datasource(feature_view.batch_source) + + df_to_join, timestamp_field = _field_mapping( + df_to_join, + feature_view, + features, + right_entity_key_columns, + entity_df_event_timestamp_col, + timestamp_field, + full_feature_names, ) - # Select only the columns we need to join from the feature dataframe - df_to_join = df_to_join[right_entity_key_columns + feature_names] + df_to_join = _merge(entity_df_with_features, df_to_join, join_keys) - # Do point in-time-join between entity_df and feature dataframe - entity_df_with_features = pd.merge_asof( - entity_df_with_features, + df_to_join = _normalize_timestamp( + df_to_join, timestamp_field, created_timestamp_column + ) + + df_to_join = _filter_ttl( df_to_join, - left_on=entity_df_event_timestamp_col, - right_on=event_timestamp_column, - by=right_entity_columns, - tolerance=feature_view.ttl, + feature_view, + entity_df_event_timestamp_col, + timestamp_field, ) - # Remove right (feature table/view) event_timestamp column. 
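The removal above is the old pandas point-in-time join; the dask rewrite reproduces the same semantics in the `_merge`, `_normalize_timestamp`, `_filter_ttl`, and `_drop_duplicates` helpers further down in this diff. For reference, a toy sketch of what `pd.merge_asof` with a TTL tolerance does (data and column names are hypothetical):

```python
# Toy illustration of the point-in-time join semantics: for each entity row,
# take the most recent feature row at or before its timestamp, no older than
# the TTL. Data and column names are hypothetical.
import pandas as pd

entity_df = pd.DataFrame(
    {
        "driver_id": [1, 1],
        "event_timestamp": pd.to_datetime(["2022-01-02", "2022-01-05"], utc=True),
    }
)
feature_df = pd.DataFrame(
    {
        "driver_id": [1, 1],
        "event_timestamp": pd.to_datetime(["2022-01-01", "2022-01-04"], utc=True),
        "trips_today": [10, 12],
    }
)

joined = pd.merge_asof(
    entity_df.sort_values("event_timestamp"),
    feature_df.sort_values("event_timestamp"),
    on="event_timestamp",
    by="driver_id",
    tolerance=pd.Timedelta(days=2),  # plays the role of feature_view.ttl
)
print(joined)  # each entity row picks up the latest feature row within the TTL
```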
- if event_timestamp_column != entity_df_event_timestamp_col: - entity_df_with_features.drop( - columns=[event_timestamp_column], inplace=True - ) + df_to_join = _drop_duplicates( + df_to_join, + all_join_keys, + timestamp_field, + created_timestamp_column, + entity_df_event_timestamp_col, + ) + + entity_df_with_features = _drop_columns( + df_to_join, timestamp_field, created_timestamp_column + ) # Ensure that we delete dataframes to free up memory del df_to_join - # Move "datetime" column to front - current_cols = entity_df_with_features.columns.tolist() - current_cols.remove(entity_df_event_timestamp_col) - entity_df_with_features = entity_df_with_features[ - [entity_df_event_timestamp_col] + current_cols - ] - - return entity_df_with_features - - job = FileRetrievalJob(evaluation_function=evaluate_historical_retrieval) + return entity_df_with_features.persist() + + job = FileRetrievalJob( + evaluation_function=evaluate_historical_retrieval, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(set(entity_df.columns) - {entity_df_event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), + ) return job @staticmethod + @log_exceptions_and_usage(offline_store="file") def pull_latest_from_table_or_query( config: RepoConfig, data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -226,17 +282,11 @@ def pull_latest_from_table_or_query( # Create lazy function that is only called from the RetrievalJob object def evaluate_offline_job(): - source_df = pd.read_parquet(data_source.path) - # Make sure all timestamp fields are tz-aware. We default tz-naive fields to UTC - source_df[event_timestamp_column] = source_df[event_timestamp_column].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc) + source_df = _read_datasource(data_source) + + source_df = _normalize_timestamp( + source_df, timestamp_field, created_timestamp_column ) - if created_timestamp_column: - source_df[created_timestamp_column] = source_df[ - created_timestamp_column - ].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc) - ) source_columns = set(source_df.columns) if not set(join_key_columns).issubset(source_columns): @@ -245,24 +295,279 @@ def evaluate_offline_job(): ) ts_columns = ( - [event_timestamp_column, created_timestamp_column] + [timestamp_field, created_timestamp_column] if created_timestamp_column - else [event_timestamp_column] + else [timestamp_field] ) + # try-catch block is added to deal with this issue https://github.com/dask/dask/issues/8939. + # TODO(kevjumba): remove try catch when fix is merged upstream in Dask. + try: + if created_timestamp_column: + source_df = source_df.sort_values(by=created_timestamp_column,) - source_df.sort_values(by=ts_columns, inplace=True) + source_df = source_df.sort_values(by=timestamp_field) - filtered_df = source_df[ - (source_df[event_timestamp_column] >= start_date) - & (source_df[event_timestamp_column] < end_date) + except ZeroDivisionError: + # Use 1 partition to get around case where everything in timestamp column is the same so the partition algorithm doesn't + # try to divide by zero. 
+ if created_timestamp_column: + source_df = source_df.sort_values( + by=created_timestamp_column, npartitions=1 + ) + + source_df = source_df.sort_values(by=timestamp_field, npartitions=1) + + source_df = source_df[ + (source_df[timestamp_field] >= start_date) + & (source_df[timestamp_field] < end_date) ] - last_values_df = filtered_df.drop_duplicates( - join_key_columns, keep="last", ignore_index=True - ) + + source_df = source_df.persist() columns_to_extract = set( join_key_columns + feature_name_columns + ts_columns ) - return last_values_df[columns_to_extract] + if join_key_columns: + source_df = source_df.drop_duplicates( + join_key_columns, keep="last", ignore_index=True + ) + else: + source_df[DUMMY_ENTITY_ID] = DUMMY_ENTITY_VAL + columns_to_extract.add(DUMMY_ENTITY_ID) + + source_df = source_df.persist() + + return source_df[list(columns_to_extract)].persist() + + # When materializing a single feature view, we don't need full feature names. On demand transforms aren't materialized + return FileRetrievalJob( + evaluation_function=evaluate_offline_job, full_feature_names=False, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="file") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + return FileOfflineStore.pull_latest_from_table_or_query( + config=config, + data_source=data_source, + join_key_columns=join_key_columns + + [timestamp_field], # avoid deduplication + feature_name_columns=feature_name_columns, + timestamp_field=timestamp_field, + created_timestamp_column=None, + start_date=start_date, + end_date=end_date, + ) + + +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], entity_df_event_timestamp_col: str, +) -> Tuple[datetime, datetime]: + if not isinstance(entity_df, pd.DataFrame): + raise ValueError( + f"Please provide an entity_df of type {type(pd.DataFrame)} instead of type {type(entity_df)}" + ) + + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime(entity_df_event_timestamp, utc=True) + + return ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), + ) + + +def _read_datasource(data_source) -> dd.DataFrame: + storage_options = ( + { + "client_kwargs": { + "endpoint_url": data_source.file_options.s3_endpoint_override + } + } + if data_source.file_options.s3_endpoint_override + else None + ) + + return dd.read_parquet(data_source.path, storage_options=storage_options,) + + +def _field_mapping( + df_to_join: dd.DataFrame, + feature_view: FeatureView, + features: List[str], + right_entity_key_columns: List[str], + entity_df_event_timestamp_col: str, + timestamp_field: str, + full_feature_names: bool, +) -> dd.DataFrame: + # Rename columns by the field mapping dictionary if it exists + if feature_view.batch_source.field_mapping: + df_to_join = _run_dask_field_mapping( + df_to_join, feature_view.batch_source.field_mapping + ) + # Rename entity columns by the join_key_map dictionary if it exists + if feature_view.projection.join_key_map: + df_to_join = _run_dask_field_mapping( + df_to_join, feature_view.projection.join_key_map + ) + + # Build a list of all the features we should select from this source + feature_names = [] + columns_map = {} + 
for feature in features: + # Modify the separator for feature refs in column names to double underscore. We are using + # double underscore as separator for consistency with other databases like BigQuery, + # where there are very few characters available for use as separators + if full_feature_names: + formatted_feature_name = ( + f"{feature_view.projection.name_to_use()}__{feature}" + ) + else: + formatted_feature_name = feature + # Add the feature name to the list of columns + feature_names.append(formatted_feature_name) + columns_map[feature] = formatted_feature_name + + # Ensure that the source dataframe feature column includes the feature view name as a prefix + df_to_join = _run_dask_field_mapping(df_to_join, columns_map) + + # Select only the columns we need to join from the feature dataframe + df_to_join = df_to_join[right_entity_key_columns + feature_names] + df_to_join = df_to_join.persist() + + # Make sure to not have duplicated columns + if entity_df_event_timestamp_col == timestamp_field: + df_to_join = _run_dask_field_mapping( + df_to_join, {timestamp_field: f"__{timestamp_field}"}, + ) + timestamp_field = f"__{timestamp_field}" + + return df_to_join.persist(), timestamp_field + + +def _merge( + entity_df_with_features: dd.DataFrame, + df_to_join: dd.DataFrame, + join_keys: List[str], +) -> dd.DataFrame: + # tmp join keys needed for cross join with null join table view + tmp_join_keys = [] + if not join_keys: + entity_df_with_features["__tmp"] = 1 + df_to_join["__tmp"] = 1 + tmp_join_keys = ["__tmp"] + + # Get only data with requested entities + df_to_join = dd.merge( + entity_df_with_features, + df_to_join, + left_on=join_keys or tmp_join_keys, + right_on=join_keys or tmp_join_keys, + suffixes=("", "__"), + how="left", + ) + + if tmp_join_keys: + df_to_join = df_to_join.drop(tmp_join_keys, axis=1).persist() + else: + df_to_join = df_to_join.persist() + + return df_to_join + + +def _normalize_timestamp( + df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, +) -> dd.DataFrame: + df_to_join_types = df_to_join.dtypes + timestamp_field_type = df_to_join_types[timestamp_field] + + if created_timestamp_column: + created_timestamp_column_type = df_to_join_types[created_timestamp_column] + + if not hasattr(timestamp_field_type, "tz") or timestamp_field_type.tz != pytz.UTC: + # Make sure all timestamp fields are tz-aware. 
We default tz-naive fields to UTC + df_to_join[timestamp_field] = df_to_join[timestamp_field].apply( + lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + meta=(timestamp_field, "datetime64[ns, UTC]"), + ) + + if created_timestamp_column and ( + not hasattr(created_timestamp_column_type, "tz") + or created_timestamp_column_type.tz != pytz.UTC + ): + df_to_join[created_timestamp_column] = df_to_join[ + created_timestamp_column + ].apply( + lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + meta=(created_timestamp_column, "datetime64[ns, UTC]"), + ) + + return df_to_join.persist() + + +def _filter_ttl( + df_to_join: dd.DataFrame, + feature_view: FeatureView, + entity_df_event_timestamp_col: str, + timestamp_field: str, +) -> dd.DataFrame: + # Filter rows by defined timestamp tolerance + if feature_view.ttl and feature_view.ttl.total_seconds() != 0: + df_to_join = df_to_join[ + ( + df_to_join[timestamp_field] + >= df_to_join[entity_df_event_timestamp_col] - feature_view.ttl + ) + & (df_to_join[timestamp_field] <= df_to_join[entity_df_event_timestamp_col]) + ] + + df_to_join = df_to_join.persist() + + return df_to_join + + +def _drop_duplicates( + df_to_join: dd.DataFrame, + all_join_keys: List[str], + timestamp_field: str, + created_timestamp_column: str, + entity_df_event_timestamp_col: str, +) -> dd.DataFrame: + if created_timestamp_column: + df_to_join = df_to_join.sort_values( + by=created_timestamp_column, na_position="first" + ) + df_to_join = df_to_join.persist() + + df_to_join = df_to_join.sort_values(by=timestamp_field, na_position="first") + df_to_join = df_to_join.persist() + + df_to_join = df_to_join.drop_duplicates( + all_join_keys + [entity_df_event_timestamp_col], keep="last", ignore_index=True, + ) + + return df_to_join.persist() + + +def _drop_columns( + df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, +) -> dd.DataFrame: + entity_df_with_features = df_to_join.drop([timestamp_field], axis=1).persist() + + if created_timestamp_column: + entity_df_with_features = entity_df_with_features.drop( + [created_timestamp_column], axis=1 + ).persist() - return FileRetrievalJob(evaluation_function=evaluate_offline_job) + return entity_df_with_features diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index cf20c78a8d..a6fc7a1600 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -1,25 +1,38 @@ +import warnings from typing import Callable, Dict, Iterable, Optional, Tuple +from pyarrow._fs import FileSystem +from pyarrow._s3fs import S3FileSystem from pyarrow.parquet import ParquetFile from feast import type_map -from feast.data_format import FileFormat +from feast.data_format import FileFormat, ParquetFormat from feast.data_source import DataSource from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage from feast.value_type import ValueType class FileSource(DataSource): def __init__( self, - event_timestamp_column: Optional[str] = "", - file_url: Optional[str] = None, + *args, path: Optional[str] = None, - file_format: FileFormat = None, + event_timestamp_column: Optional[str] = "", + file_format: Optional[FileFormat] = None, created_timestamp_column:
Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, date_partition_column: Optional[str] = "", + s3_endpoint_override: Optional[str] = None, + name: Optional[str] = "", + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = "", ): """Create a FileSource from a file containing feature data. Only Parquet format supported. @@ -27,92 +40,124 @@ def __init__( path: File path to file containing feature data. Must contain an event_timestamp column, entity columns and feature columns. - event_timestamp_column: Event timestamp column used for point in time joins of feature values. + event_timestamp_column (optional): (Deprecated) Event timestamp column used for point in time joins of feature values. created_timestamp_column (optional): Timestamp column when row was created, used for deduplicating rows. - file_url: [Deprecated] Please see path file_format (optional): Explicitly set the file format. Allows Feast to bypass inferring the file format. field_mapping: A dictionary mapping of column names in this data source to feature names in a feature table or view. Only used for feature columns, not entities or timestamp columns. + date_partition_column (optional): Timestamp column used for partitioning. + s3_endpoint_override (optional): Overrides AWS S3 endpoint with custom S3 storage + name (optional): Name for the file source. Defaults to the path. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the file source, typically the email of the primary + maintainer. + timestamp_field (optional): Event timestamp field used for point in time + joins of feature values. Examples: - >>> FileSource(path="/data/my_features.parquet", event_timestamp_column="datetime") + >>> from feast import FileSource + >>> file_source = FileSource(path="my_features.parquet", timestamp_field="event_timestamp") """ - if path is None and file_url is None: + positional_attributes = ["path"] + _path = path + if args: + warnings.warn( + ( + "File Source parameters should be specified as a keyword argument instead of a positional arg. " + "Feast 0.23+ will not support positional arguments to construct File sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"File sources, for backwards compatibility." + ) + if len(args) >= 1: + _path = args[0] + if _path is None: raise ValueError( 'No "path" argument provided. Please set "path" to the location of your file source.' ) - if file_url: - from warnings import warn + self.file_options = FileOptions( + file_format=file_format, + uri=_path, + s3_endpoint_override=s3_endpoint_override, + ) - warn( - 'Argument "file_url" is being deprecated. Please use the "path" argument.' + if date_partition_column: + warnings.warn( + ( + "The argument 'date_partition_column' is not supported for File sources. "
+ "It will be removed in Feast 0.21+" + ), + DeprecationWarning, ) - else: - file_url = path - - self._file_options = FileOptions(file_format=file_format, file_url=file_url) super().__init__( - event_timestamp_column, - created_timestamp_column, - field_mapping, - date_partition_column, + name=name if name else path, + event_timestamp_column=event_timestamp_column, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping, + description=description, + tags=tags, + owner=owner, + timestamp_field=timestamp_field, ) + # Note: Python requires redefining hash in child classes that override __eq__ + def __hash__(self): + return super().__hash__() + def __eq__(self, other): if not isinstance(other, FileSource): raise TypeError("Comparisons should only involve FileSource class objects.") return ( - self.file_options.file_url == other.file_options.file_url + super().__eq__(other) + and self.path == other.path and self.file_options.file_format == other.file_options.file_format - and self.event_timestamp_column == other.event_timestamp_column - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping + and self.file_options.s3_endpoint_override + == other.file_options.s3_endpoint_override ) - @property - def file_options(self): - """ - Returns the file options of this data source - """ - return self._file_options - - @file_options.setter - def file_options(self, file_options): - """ - Sets the file options of this data source - """ - self._file_options = file_options - @property def path(self): """ - Returns the file path of this feature data source + Returns the path of this file data source. """ - return self._file_options.file_url + return self.file_options.uri @staticmethod def from_proto(data_source: DataSourceProto): return FileSource( + name=data_source.name, field_mapping=dict(data_source.field_mapping), file_format=FileFormat.from_proto(data_source.file_options.file_format), - path=data_source.file_options.file_url, - event_timestamp_column=data_source.event_timestamp_column, + path=data_source.file_options.uri, + timestamp_field=data_source.timestamp_field, created_timestamp_column=data_source.created_timestamp_column, - date_partition_column=data_source.date_partition_column, + s3_endpoint_override=data_source.file_options.s3_endpoint_override, + description=data_source.description, + tags=dict(data_source.tags), + owner=data_source.owner, ) def to_proto(self) -> DataSourceProto: data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.BATCH_FILE, field_mapping=self.field_mapping, file_options=self.file_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, ) - data_source_proto.event_timestamp_column = self.event_timestamp_column + data_source_proto.timestamp_field = self.timestamp_field data_source_proto.created_timestamp_column = self.created_timestamp_column - data_source_proto.date_partition_column = self.date_partition_column return data_source_proto @@ -127,55 +172,59 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: - schema = ParquetFile(self.path).schema_arrow + filesystem, path = FileSource.create_filesystem_and_path( + self.path, self.file_options.s3_endpoint_override + ) + schema = ParquetFile( + path if filesystem is None else filesystem.open_input_file(path) + ).schema_arrow return zip(schema.names, map(str, 
schema.types)) + @staticmethod + def create_filesystem_and_path( + path: str, s3_endpoint_override: str + ) -> Tuple[Optional[FileSystem], str]: + if path.startswith("s3://"): + s3fs = S3FileSystem( + endpoint_override=s3_endpoint_override if s3_endpoint_override else None + ) + return s3fs, path.replace("s3://", "") + else: + return None, path + + def get_table_query_string(self) -> str: + pass + class FileOptions: """ - DataSource File options used to source features from a file + Configuration options for a file data source. """ def __init__( - self, file_format: Optional[FileFormat], file_url: Optional[str], + self, + file_format: Optional[FileFormat], + s3_endpoint_override: Optional[str], + uri: Optional[str], ): - self._file_format = file_format - self._file_url = file_url - - @property - def file_format(self): - """ - Returns the file format of this file - """ - return self._file_format - - @file_format.setter - def file_format(self, file_format): - """ - Sets the file format of this file """ - self._file_format = file_format + Initializes a FileOptions object. - @property - def file_url(self): - """ - Returns the file url of this file - """ - return self._file_url - - @file_url.setter - def file_url(self, file_url): - """ - Sets the file url of this file + Args: + file_format (optional): File source format, e.g. parquet. + s3_endpoint_override (optional): Custom s3 endpoint (used only with s3 uri). + uri (optional): File source url, e.g. s3:// or local file. """ - self._file_url = file_url + self.file_format = file_format + self.uri = uri or "" + self.s3_endpoint_override = s3_endpoint_override or "" @classmethod def from_proto(cls, file_options_proto: DataSourceProto.FileOptions): """ Creates a FileOptions from a protobuf representation of a file option - args: + Args: file_options_proto: a protobuf representation of a datasource Returns: @@ -183,7 +232,8 @@ def from_proto(cls, file_options_proto: DataSourceProto.FileOptions): """ file_options = cls( file_format=FileFormat.from_proto(file_options_proto.file_format), - file_url=file_options_proto.file_url, + uri=file_options_proto.uri, + s3_endpoint_override=file_options_proto.s3_endpoint_override, ) return file_options @@ -194,12 +244,49 @@ def to_proto(self) -> DataSourceProto.FileOptions: Returns: FileOptionsProto protobuf """ - file_options_proto = DataSourceProto.FileOptions( file_format=( None if self.file_format is None else self.file_format.to_proto() ), - file_url=self.file_url, + uri=self.uri, + s3_endpoint_override=self.s3_endpoint_override, ) return file_options_proto + + +class SavedDatasetFileStorage(SavedDatasetStorage): + _proto_attr_name = "file_storage" + + file_options: FileOptions + + def __init__( + self, + path: str, + file_format: FileFormat = ParquetFormat(), + s3_endpoint_override: Optional[str] = None, + ): + self.file_options = FileOptions( + file_format=file_format, + s3_endpoint_override=s3_endpoint_override, + uri=path, + ) + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: + file_options = FileOptions.from_proto(storage_proto.file_storage) + return SavedDatasetFileStorage( + path=file_options.uri, + file_format=file_options.file_format, + s3_endpoint_override=file_options.s3_endpoint_override, + ) + + def to_proto(self) -> SavedDatasetStorageProto: + return SavedDatasetStorageProto(file_storage=self.file_options.to_proto()) + + def to_data_source(self) -> DataSource: + return FileSource( + path=self.file_options.uri, + 
file_format=self.file_options.file_format, + s3_endpoint_override=self.file_options.s3_endpoint_override, + ) diff --git a/sdk/python/feast/infra/offline_stores/helpers.py b/sdk/python/feast/infra/offline_stores/helpers.py deleted file mode 100644 index dff604c7ed..0000000000 --- a/sdk/python/feast/infra/offline_stores/helpers.py +++ /dev/null @@ -1,31 +0,0 @@ -import importlib -from typing import Any - -from feast import errors -from feast.infra.offline_stores.offline_store import OfflineStore - - -def get_offline_store_from_config(offline_store_config: Any,) -> OfflineStore: - """Get the offline store from offline store config""" - - module_name = offline_store_config.__module__ - qualified_name = type(offline_store_config).__name__ - store_class_name = qualified_name.replace("Config", "") - try: - module = importlib.import_module(module_name) - except Exception as e: - # The original exception can be anything - either module not found, - # or any other kind of error happening during the module import time. - # So we should include the original error as well in the stack trace. - raise errors.FeastModuleImportError(module_name, "OfflineStore") from e - - # Try getting the provider class definition - try: - offline_store_class = getattr(module, store_class_name) - except AttributeError: - # This can only be one type of error, when class_name attribute does not exist in the module - # So we don't have to include the original exception here - raise errors.FeastClassImportError( - module_name, store_class_name, class_type="OfflineStore" - ) from None - return offline_store_class() diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index e8d32cd384..83f20bb3e5 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -11,32 +11,154 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
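The file_source.py changes above route all path handling through the new `FileSource.create_filesystem_and_path` static method. A hypothetical usage sketch (bucket name and endpoint are stand-ins):

```python
# Hypothetical usage sketch of the new FileSource.create_filesystem_and_path:
# s3:// URIs are served through pyarrow's S3FileSystem (optionally against a
# custom endpoint such as MinIO); local paths pass through unchanged.
from feast.infra.offline_stores.file_source import FileSource

fs, path = FileSource.create_filesystem_and_path(
    "s3://feature-bucket/my_features.parquet", "http://localhost:9000"
)
# fs is an S3FileSystem; path == "feature-bucket/my_features.parquet"

fs, path = FileSource.create_filesystem_and_path("/data/my_features.parquet", "")
# fs is None; path is returned as-is
```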
+import warnings from abc import ABC, abstractmethod from datetime import datetime -from typing import List, Optional, Union +from typing import TYPE_CHECKING, List, Optional, Union import pandas as pd import pyarrow from feast.data_source import DataSource +from feast.dqm.errors import ValidationFailed from feast.feature_view import FeatureView +from feast.on_demand_feature_view import OnDemandFeatureView from feast.registry import Registry from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage + +if TYPE_CHECKING: + from feast.saved_dataset import ValidationReference + + +class RetrievalMetadata: + min_event_timestamp: Optional[datetime] + max_event_timestamp: Optional[datetime] + + # List of feature references + features: List[str] + # List of entity keys + ODFV inputs + keys: List[str] + + def __init__( + self, + features: List[str], + keys: List[str], + min_event_timestamp: Optional[datetime] = None, + max_event_timestamp: Optional[datetime] = None, + ): + self.features = features + self.keys = keys + self.min_event_timestamp = min_event_timestamp + self.max_event_timestamp = max_event_timestamp class RetrievalJob(ABC): """RetrievalJob is used to manage the execution of a historical feature retrieval""" + @property + @abstractmethod + def full_feature_names(self) -> bool: + pass + + @property @abstractmethod - def to_df(self) -> pd.DataFrame: + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + pass + + def to_df( + self, validation_reference: Optional["ValidationReference"] = None + ) -> pd.DataFrame: + """ + Return dataset as Pandas DataFrame synchronously including on demand transforms + Args: + validation_reference: If provided resulting dataset will be validated against this reference profile. + """ + features_df = self._to_df_internal() + + if self.on_demand_feature_views: + # TODO(adchia): Fix requirement to specify dependent feature views in feature_refs + for odfv in self.on_demand_feature_views: + features_df = features_df.join( + odfv.get_transformed_features_df( + features_df, self.full_feature_names, + ) + ) + + if validation_reference: + warnings.warn( + "Dataset validation is an experimental feature. " + "This API is unstable and it could and most probably will be changed in the future. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + + validation_result = validation_reference.profile.validate(features_df) + if not validation_result.is_success: + raise ValidationFailed(validation_result) + + return features_df + + @abstractmethod + def _to_df_internal(self) -> pd.DataFrame: """Return dataset as Pandas DataFrame synchronously""" pass @abstractmethod - def to_arrow(self) -> pyarrow.Table: + def _to_arrow_internal(self) -> pyarrow.Table: """Return dataset as pyarrow Table synchronously""" pass + def to_arrow( + self, validation_reference: Optional["ValidationReference"] = None + ) -> pyarrow.Table: + """ + Return dataset as pyarrow Table synchronously + Args: + validation_reference: If provided resulting dataset will be validated against this reference profile. 
+ """ + if not self.on_demand_feature_views and not validation_reference: + return self._to_arrow_internal() + + features_df = self._to_df_internal() + if self.on_demand_feature_views: + for odfv in self.on_demand_feature_views: + features_df = features_df.join( + odfv.get_transformed_features_df( + features_df, self.full_feature_names, + ) + ) + + if validation_reference: + warnings.warn( + "Dataset validation is an experimental feature. " + "This API is unstable and it could and most probably will be changed in the future. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + + validation_result = validation_reference.profile.validate(features_df) + if not validation_result.is_success: + raise ValidationFailed(validation_result) + + return pyarrow.Table.from_pandas(features_df) + + @abstractmethod + def persist(self, storage: SavedDatasetStorage): + """ + Run the retrieval and persist the results in the same offline store used for read. + """ + pass + + @property + @abstractmethod + def metadata(self) -> Optional[RetrievalMetadata]: + """ + Return metadata information about retrieval. + Should be available even before materializing the dataset itself. + """ + pass + class OfflineStore(ABC): """ @@ -51,15 +173,30 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, ) -> RetrievalJob: """ - Note that join_key_columns, feature_name_columns, event_timestamp_column, and created_timestamp_column + This method pulls data from the offline store, and the FeatureStore class is used to write + this data into the online store. This method is invoked when running materialization (using + the `feast materialize` or `feast materialize-incremental` commands, or the corresponding + FeatureStore.materialize() method. This method pulls data from the offline store, and the FeatureStore + class is used to write this data into the online store. + + Note that join_key_columns, feature_name_columns, timestamp_field, and created_timestamp_column have all already been mapped to column names of the source table and those column names are the values passed into this function. + + Args: + config: Repo configuration object + data_source: Data source to pull all of the columns from + join_key_columns: Columns of the join keys + feature_name_columns: Columns of the feature names needed + timestamp_field: Timestamp column + start_date: Starting date of query + end_date: Ending date of query """ pass @@ -75,3 +212,32 @@ def get_historical_features( full_feature_names: bool = False, ) -> RetrievalJob: pass + + @staticmethod + @abstractmethod + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + """ + Returns a Retrieval Job for all join key columns, feature name columns, and the event timestamp columns that occur between the start_date and end_date. + + Note that join_key_columns, feature_name_columns, timestamp_field, and created_timestamp_column + have all already been mapped to column names of the source table and those column names are the values passed + into this function. 
+ + Args: + config: Repo configuration object + data_source: Data source to pull all of the columns from + join_key_columns: Columns of the join keys + feature_name_columns: Columns of the feature names needed + timestamp_field: Timestamp column + start_date: Starting date of query + end_date: Ending date of query + """ + pass diff --git a/sdk/python/feast/infra/offline_stores/offline_utils.py b/sdk/python/feast/infra/offline_stores/offline_utils.py new file mode 100644 index 0000000000..b6c3d300d4 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/offline_utils.py @@ -0,0 +1,221 @@ +import uuid +from dataclasses import asdict, dataclass +from datetime import datetime, timedelta +from typing import Any, Dict, KeysView, List, Optional, Set, Tuple + +import numpy as np +import pandas as pd +from jinja2 import BaseLoader, Environment +from pandas import Timestamp + +import feast +from feast.errors import ( + EntityTimestampInferenceException, + FeastEntityDFMissingColumnsError, +) +from feast.feature_view import FeatureView +from feast.importer import import_class +from feast.infra.offline_stores.offline_store import OfflineStore +from feast.infra.provider import _get_requested_feature_views_to_features_dict +from feast.registry import Registry +from feast.utils import to_naive_utc + +DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL = "event_timestamp" + + +def infer_event_timestamp_from_entity_df(entity_schema: Dict[str, np.dtype]) -> str: + if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in entity_schema.keys(): + return DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL + + datetime_columns = [ + column + for column, dtype in entity_schema.items() + if pd.core.dtypes.common.is_datetime64_any_dtype(dtype) + ] + + if len(datetime_columns) == 1: + print( + f"Using {datetime_columns[0]} as the event timestamp. To specify a column explicitly, please name it {DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL}." 
+ ) + return datetime_columns[0] + else: + raise EntityTimestampInferenceException(DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL) + + +def assert_expected_columns_in_entity_df( + entity_schema: Dict[str, np.dtype], + join_keys: Set[str], + entity_df_event_timestamp_col: str, +): + entity_columns = set(entity_schema.keys()) + expected_columns = join_keys | {entity_df_event_timestamp_col} + missing_keys = expected_columns - entity_columns + + if len(missing_keys) != 0: + raise FeastEntityDFMissingColumnsError(expected_columns, missing_keys) + + +def get_expected_join_keys( + project: str, feature_views: List["feast.FeatureView"], registry: Registry +) -> Set[str]: + join_keys = set() + for feature_view in feature_views: + entities = feature_view.entities + for entity_name in entities: + entity = registry.get_entity(entity_name, project) + join_key = feature_view.projection.join_key_map.get( + entity.join_key, entity.join_key + ) + join_keys.add(join_key) + return join_keys + + +def get_entity_df_timestamp_bounds( + entity_df: pd.DataFrame, event_timestamp_col: str +) -> Tuple[Timestamp, Timestamp]: + event_timestamp_series = entity_df[event_timestamp_col] + return event_timestamp_series.min(), event_timestamp_series.max() + + +@dataclass(frozen=True) +class FeatureViewQueryContext: + """Context object used to template a BigQuery and Redshift point-in-time SQL query""" + + name: str + ttl: int + entities: List[str] + features: List[str] # feature reference format + field_mapping: Dict[str, str] + timestamp_field: str + created_timestamp_column: Optional[str] + table_subquery: str + entity_selections: List[str] + min_event_timestamp: Optional[str] + max_event_timestamp: str + + +def get_feature_view_query_context( + feature_refs: List[str], + feature_views: List[FeatureView], + registry: Registry, + project: str, + entity_df_timestamp_range: Tuple[datetime, datetime], +) -> List[FeatureViewQueryContext]: + """ + Build a query context containing all information required to template a BigQuery and + Redshift point-in-time SQL query + """ + ( + feature_views_to_feature_map, + on_demand_feature_views_to_features, + ) = _get_requested_feature_views_to_features_dict( + feature_refs, feature_views, registry.list_on_demand_feature_views(project) + ) + + query_context = [] + for feature_view, features in feature_views_to_feature_map.items(): + join_keys, entity_selections = [], [] + for entity_name in feature_view.entities: + entity = registry.get_entity(entity_name, project) + join_key = feature_view.projection.join_key_map.get( + entity.join_key, entity.join_key + ) + join_keys.append(join_key) + entity_selections.append(f"{entity.join_key} AS {join_key}") + + if isinstance(feature_view.ttl, timedelta): + ttl_seconds = int(feature_view.ttl.total_seconds()) + else: + ttl_seconds = 0 + + reverse_field_mapping = { + v: k for k, v in feature_view.batch_source.field_mapping.items() + } + features = [reverse_field_mapping.get(feature, feature) for feature in features] + timestamp_field = reverse_field_mapping.get( + feature_view.batch_source.timestamp_field, + feature_view.batch_source.timestamp_field, + ) + created_timestamp_column = reverse_field_mapping.get( + feature_view.batch_source.created_timestamp_column, + feature_view.batch_source.created_timestamp_column, + ) + + max_event_timestamp = to_naive_utc(entity_df_timestamp_range[1]).isoformat() + min_event_timestamp = None + if feature_view.ttl: + min_event_timestamp = to_naive_utc( + entity_df_timestamp_range[0] - feature_view.ttl + ).isoformat() + + 
context = FeatureViewQueryContext( + name=feature_view.projection.name_to_use(), + ttl=ttl_seconds, + entities=join_keys, + features=features, + field_mapping=feature_view.batch_source.field_mapping, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + # TODO: Make created column optional and not hardcoded + table_subquery=feature_view.batch_source.get_table_query_string(), + entity_selections=entity_selections, + min_event_timestamp=min_event_timestamp, + max_event_timestamp=max_event_timestamp, + ) + query_context.append(context) + + return query_context + + +def build_point_in_time_query( + feature_view_query_contexts: List[FeatureViewQueryContext], + left_table_query_string: str, + entity_df_event_timestamp_col: str, + entity_df_columns: KeysView[str], + query_template: str, + full_feature_names: bool = False, +) -> str: + """Build point-in-time query between each feature view table and the entity dataframe for Bigquery and Redshift""" + template = Environment(loader=BaseLoader()).from_string(source=query_template) + + final_output_feature_names = list(entity_df_columns) + final_output_feature_names.extend( + [ + ( + f"{fv.name}__{fv.field_mapping.get(feature, feature)}" + if full_feature_names + else fv.field_mapping.get(feature, feature) + ) + for fv in feature_view_query_contexts + for feature in fv.features + ] + ) + + # Add additional fields to dict + template_context = { + "left_table_query_string": left_table_query_string, + "entity_df_event_timestamp_col": entity_df_event_timestamp_col, + "unique_entity_keys": set( + [entity for fv in feature_view_query_contexts for entity in fv.entities] + ), + "featureviews": [asdict(context) for context in feature_view_query_contexts], + "full_feature_names": full_feature_names, + "final_output_feature_names": final_output_feature_names, + } + + query = template.render(template_context) + return query + + +def get_temp_entity_table_name() -> str: + """Returns a random table name for uploading the entity dataframe""" + return "feast_entity_df_" + uuid.uuid4().hex + + +def get_offline_store_from_config(offline_store_config: Any) -> OfflineStore: + """Creates an offline store corresponding to the given offline store config.""" + module_name = offline_store_config.__module__ + qualified_name = type(offline_store_config).__name__ + class_name = qualified_name.replace("Config", "") + offline_store_class = import_class(module_name, class_name, "OfflineStore") + return offline_store_class() diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 9204ff00be..cd309c92b2 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -1,23 +1,45 @@ +import contextlib import uuid from datetime import datetime -from typing import List, Optional, Union +from typing import ( + Callable, + ContextManager, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, +) +import numpy as np import pandas as pd import pyarrow as pa +from dateutil import parser from pydantic import StrictStr from pydantic.typing import Literal +from pytz import utc -from feast import RedshiftSource +from feast import OnDemandFeatureView, RedshiftSource from feast.data_source import DataSource -from feast.feature_view import FeatureView -from feast.infra.offline_stores.offline_store import OfflineStore, RetrievalJob +from feast.errors import InvalidEntityType +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, 
FeatureView +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.infra.offline_stores.redshift_source import SavedDatasetRedshiftStorage from feast.infra.utils import aws_utils from feast.registry import Registry from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.usage import log_exceptions_and_usage class RedshiftOfflineStoreConfig(FeastConfigBaseModel): - """ Offline store config for AWS Redshift """ + """Offline store config for AWS Redshift""" type: Literal["redshift"] = "redshift" """ Offline store type selector""" @@ -43,12 +65,13 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): class RedshiftOfflineStore(OfflineStore): @staticmethod + @log_exceptions_and_usage(offline_store="redshift") def pull_latest_from_table_or_query( config: RepoConfig, data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -63,7 +86,7 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamp_columns = [event_timestamp_column] + timestamp_columns = [timestamp_field] if created_timestamp_column: timestamp_columns.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamp_columns) + " DESC" @@ -76,24 +99,72 @@ def pull_latest_from_table_or_query( ) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + query = f""" - SELECT {field_string} + SELECT + {field_string} + {f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""} FROM ( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' ) WHERE _feast_row = 1 """ + # When materializing a single feature view, we don't need full feature names. 
On demand transforms aren't materialized + return RedshiftRetrievalJob( + query=query, + redshift_client=redshift_client, + s3_resource=s3_resource, + config=config, + full_feature_names=False, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="redshift") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, RedshiftSource) + from_expression = data_source.get_table_query_string() + + field_string = ", ".join( + join_key_columns + feature_name_columns + [timestamp_field] + ) + + redshift_client = aws_utils.get_redshift_data_client( + config.offline_store.region + ) + s3_resource = aws_utils.get_s3_resource(config.offline_store.region) + + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + + query = f""" + SELECT {field_string} + FROM {from_expression} + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + """ + return RedshiftRetrievalJob( query=query, redshift_client=redshift_client, s3_resource=s3_resource, config=config, + full_feature_names=False, ) @staticmethod + @log_exceptions_and_usage(offline_store="redshift") def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], @@ -103,20 +174,121 @@ def get_historical_features( project: str, full_feature_names: bool = False, ) -> RetrievalJob: - pass + assert isinstance(config.offline_store, RedshiftOfflineStoreConfig) + + redshift_client = aws_utils.get_redshift_data_client( + config.offline_store.region + ) + s3_resource = aws_utils.get_s3_resource(config.offline_store.region) + + entity_schema = _get_entity_schema( + entity_df, redshift_client, config, s3_resource + ) + + entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema + ) + + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, entity_df_event_timestamp_col, redshift_client, config, + ) + + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + table_name = offline_utils.get_temp_entity_table_name() + + _upload_entity_df( + entity_df, redshift_client, config, s3_resource, table_name + ) + + expected_join_keys = offline_utils.get_expected_join_keys( + project, feature_views, registry + ) + + offline_utils.assert_expected_columns_in_entity_df( + entity_schema, expected_join_keys, entity_df_event_timestamp_col + ) + + # Build a query context containing all information required to template the Redshift SQL query + query_context = offline_utils.get_feature_view_query_context( + feature_refs, + feature_views, + registry, + project, + entity_df_event_timestamp_range, + ) + + # Generate the Redshift SQL query from the query context + query = offline_utils.build_point_in_time_query( + query_context, + left_table_query_string=table_name, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + + try: + yield query + finally: + # Always clean up the uploaded Redshift table + aws_utils.execute_redshift_statement( + redshift_client, + config.offline_store.cluster_id, + config.offline_store.database, + config.offline_store.user, + f"DROP TABLE IF EXISTS {table_name}", + ) + + return RedshiftRetrievalJob( + query=query_generator, 
+ redshift_client=redshift_client, + s3_resource=s3_resource, + config=config, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(entity_schema.keys() - {entity_df_event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), + ) class RedshiftRetrievalJob(RetrievalJob): - def __init__(self, query: str, redshift_client, s3_resource, config: RepoConfig): + def __init__( + self, + query: Union[str, Callable[[], ContextManager[str]]], + redshift_client, + s3_resource, + config: RepoConfig, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + metadata: Optional[RetrievalMetadata] = None, + ): """Initialize RedshiftRetrievalJob object. Args: - query: Redshift SQL query to execute. + query: Redshift SQL query to execute. Either a string, or a generator function that handles the artifact cleanup. redshift_client: boto3 redshift-data client s3_resource: boto3 s3 resource object config: Feast repo config + full_feature_names: Whether to add the feature view prefixes to the feature names + on_demand_feature_views (optional): A list of on demand transforms to apply at retrieval time """ - self.query = query + if not isinstance(query, str): + self._query_generator = query + else: + + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + assert isinstance(query, str) + yield query + + self._query_generator = query_generator self._redshift_client = redshift_client self._s3_resource = s3_resource self._config = config @@ -125,50 +297,376 @@ def __init__(self, query: str, redshift_client, s3_resource, config: RepoConfig) + "/unload/" + str(uuid.uuid4()) ) - - def to_df(self) -> pd.DataFrame: - return aws_utils.unload_redshift_query_to_df( - self._redshift_client, - self._config.offline_store.cluster_id, - self._config.offline_store.database, - self._config.offline_store.user, - self._s3_resource, - self._s3_path, - self._config.offline_store.iam_role, - self.query, + self._full_feature_names = full_feature_names + self._on_demand_feature_views = ( + on_demand_feature_views if on_demand_feature_views else [] ) + self._metadata = metadata - def to_arrow(self) -> pa.Table: - return aws_utils.unload_redshift_query_to_pa( - self._redshift_client, - self._config.offline_store.cluster_id, - self._config.offline_store.database, - self._config.offline_store.user, - self._s3_resource, - self._s3_path, - self._config.offline_store.iam_role, - self.query, - ) + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views + + @log_exceptions_and_usage + def _to_df_internal(self) -> pd.DataFrame: + with self._query_generator() as query: + return aws_utils.unload_redshift_query_to_df( + self._redshift_client, + self._config.offline_store.cluster_id, + self._config.offline_store.database, + self._config.offline_store.user, + self._s3_resource, + self._s3_path, + self._config.offline_store.iam_role, + query, + ) + + @log_exceptions_and_usage + def _to_arrow_internal(self) -> pa.Table: + with self._query_generator() as query: + return aws_utils.unload_redshift_query_to_pa( + self._redshift_client, + self._config.offline_store.cluster_id, + 
self._config.offline_store.database, + self._config.offline_store.user, + self._s3_resource, + self._s3_path, + self._config.offline_store.iam_role, + query, + ) + @log_exceptions_and_usage def to_s3(self) -> str: - """ Export dataset to S3 in Parquet format and return path """ - aws_utils.execute_redshift_query_and_unload_to_s3( - self._redshift_client, - self._config.offline_store.cluster_id, - self._config.offline_store.database, - self._config.offline_store.user, - self._s3_path, - self._config.offline_store.iam_role, - self.query, - ) - return self._s3_path + """Export dataset to S3 in Parquet format and return path""" + if self.on_demand_feature_views: + transformed_df = self.to_df() + aws_utils.upload_df_to_s3(self._s3_resource, self._s3_path, transformed_df) + return self._s3_path + with self._query_generator() as query: + aws_utils.execute_redshift_query_and_unload_to_s3( + self._redshift_client, + self._config.offline_store.cluster_id, + self._config.offline_store.database, + self._config.offline_store.user, + self._s3_path, + self._config.offline_store.iam_role, + query, + ) + return self._s3_path + + @log_exceptions_and_usage def to_redshift(self, table_name: str) -> None: - """ Save dataset as a new Redshift table """ + """Save dataset as a new Redshift table""" + if self.on_demand_feature_views: + transformed_df = self.to_df() + aws_utils.upload_df_to_redshift( + self._redshift_client, + self._config.offline_store.cluster_id, + self._config.offline_store.database, + self._config.offline_store.user, + self._s3_resource, + f"{self._config.offline_store.s3_staging_location}/features_df/{table_name}.parquet", + self._config.offline_store.iam_role, + table_name, + transformed_df, + ) + return + + with self._query_generator() as query: + query = f'CREATE TABLE "{table_name}" AS ({query});\n' + + aws_utils.execute_redshift_statement( + self._redshift_client, + self._config.offline_store.cluster_id, + self._config.offline_store.database, + self._config.offline_store.user, + query, + ) + + def persist(self, storage: SavedDatasetStorage): + assert isinstance(storage, SavedDatasetRedshiftStorage) + self.to_redshift(table_name=storage.redshift_options.table) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata + + +def _upload_entity_df( + entity_df: Union[pd.DataFrame, str], + redshift_client, + config: RepoConfig, + s3_resource, + table_name: str, +): + if isinstance(entity_df, pd.DataFrame): + # If the entity_df is a pandas dataframe, upload it to Redshift + aws_utils.upload_df_to_redshift( + redshift_client, + config.offline_store.cluster_id, + config.offline_store.database, + config.offline_store.user, + s3_resource, + f"{config.offline_store.s3_staging_location}/entity_df/{table_name}.parquet", + config.offline_store.iam_role, + table_name, + entity_df, + ) + elif isinstance(entity_df, str): + # If the entity_df is a string (SQL query), create a Redshift table out of it aws_utils.execute_redshift_statement( - self._redshift_client, - self._config.offline_store.cluster_id, - self._config.offline_store.database, - self._config.offline_store.user, - f'CREATE TABLE "{table_name}" AS ({self.query})', + redshift_client, + config.offline_store.cluster_id, + config.offline_store.database, + config.offline_store.user, + f"CREATE TABLE {table_name} AS ({entity_df})", ) + else: + raise InvalidEntityType(type(entity_df)) + + +def _get_entity_schema( + entity_df: Union[pd.DataFrame, str], + redshift_client, + config: RepoConfig, + s3_resource, +) -> 
Dict[str, np.dtype]:
+    if isinstance(entity_df, pd.DataFrame):
+        return dict(zip(entity_df.columns, entity_df.dtypes))
+
+    elif isinstance(entity_df, str):
+        # Get a pandas dataframe consisting of 1 row (LIMIT 1) and generate the schema from it
+        entity_df_sample = RedshiftRetrievalJob(
+            f"SELECT * FROM ({entity_df}) LIMIT 1",
+            redshift_client,
+            s3_resource,
+            config,
+            full_feature_names=False,
+        ).to_df()
+        return dict(zip(entity_df_sample.columns, entity_df_sample.dtypes))
+    else:
+        raise InvalidEntityType(type(entity_df))
+
+
+def _get_entity_df_event_timestamp_range(
+    entity_df: Union[pd.DataFrame, str],
+    entity_df_event_timestamp_col: str,
+    redshift_client,
+    config: RepoConfig,
+) -> Tuple[datetime, datetime]:
+    if isinstance(entity_df, pd.DataFrame):
+        entity_df_event_timestamp = entity_df.loc[
+            :, entity_df_event_timestamp_col
+        ].infer_objects()
+        if pd.api.types.is_string_dtype(entity_df_event_timestamp):
+            entity_df_event_timestamp = pd.to_datetime(
+                entity_df_event_timestamp, utc=True
+            )
+        entity_df_event_timestamp_range = (
+            entity_df_event_timestamp.min().to_pydatetime(),
+            entity_df_event_timestamp.max().to_pydatetime(),
+        )
+    elif isinstance(entity_df, str):
+        # If the entity_df is a string (SQL query), determine the timestamp range
+        # from the table
+        statement_id = aws_utils.execute_redshift_statement(
+            redshift_client,
+            config.offline_store.cluster_id,
+            config.offline_store.database,
+            config.offline_store.user,
+            f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max "
+            f"FROM ({entity_df})",
+        )
+        res = aws_utils.get_redshift_statement_result(redshift_client, statement_id)[
+            "Records"
+        ][0]
+        entity_df_event_timestamp_range = (
+            parser.parse(res[0]["stringValue"]),
+            parser.parse(res[1]["stringValue"]),
+        )
+    else:
+        raise InvalidEntityType(type(entity_df))
+
+    return entity_df_event_timestamp_range
+
+
+# This query is based on sdk/python/feast/infra/offline_stores/bigquery.py:MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN
+# There are a couple of changes from BigQuery:
+# 1. Use VARCHAR instead of STRING type
+# 2. Use "t - x * interval '1' second" instead of "Timestamp_sub(...)"
+# 3. Replace `SELECT * EXCEPT (...)` with `SELECT *`, because `EXCEPT` is not supported by Redshift.
+#    Instead, we drop the column later after creating the table out of the query.
+# We need to keep this query in sync with BigQuery.
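For orientation, a minimal sketch of how a Jinja2 template like the one below is rendered into a runnable query. In the surrounding code this happens inside offline_utils.build_point_in_time_query; the helper and context names here are illustrative assumptions, not the actual Feast API:

from jinja2 import Template

def render_pit_query(template_str, featureviews, left_table, timestamp_col,
                     output_columns, full_feature_names=False):
    # Each entry in `featureviews` must expose the attributes the template
    # references (name, entities, features, ttl, field_mapping, ...).
    return Template(template_str).render(
        featureviews=featureviews,
        left_table_query_string=left_table,
        entity_df_event_timestamp_col=timestamp_col,
        final_output_feature_names=output_columns,
        full_feature_names=full_feature_names,
    )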
+
+MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """
+/*
+ Compute a deterministic hash for the `left_table_query_string` that will be used throughout
+ all the logic as the field to GROUP BY the data
+*/
+WITH entity_dataframe AS (
+    SELECT *,
+        {{entity_df_event_timestamp_col}} AS entity_timestamp
+        {% for featureview in featureviews %}
+            {% if featureview.entities %}
+            ,(
+                {% for entity in featureview.entities %}
+                    CAST({{entity}} as VARCHAR) ||
+                {% endfor %}
+                CAST({{entity_df_event_timestamp_col}} AS VARCHAR)
+            ) AS {{featureview.name}}__entity_row_unique_id
+            {% else %}
+            ,CAST({{entity_df_event_timestamp_col}} AS VARCHAR) AS {{featureview.name}}__entity_row_unique_id
+            {% endif %}
+        {% endfor %}
+    FROM {{ left_table_query_string }}
+),
+
+{% for featureview in featureviews %}
+
+{{ featureview.name }}__entity_dataframe AS (
+    SELECT
+        {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
+        entity_timestamp,
+        {{featureview.name}}__entity_row_unique_id
+    FROM entity_dataframe
+    GROUP BY
+        {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
+        entity_timestamp,
+        {{featureview.name}}__entity_row_unique_id
+),
+
+/*
+ This query template performs the point-in-time correctness join for a single feature set table
+ to the provided entity table.
+
+ 1. We first join the current feature_view to the entity dataframe that has been passed.
+ This JOIN has the following logic:
+    - For each row of the entity dataframe, only keep the rows where the `timestamp_field`
+    is less than the one provided in the entity dataframe
+    - If there is a TTL for the current feature_view, also keep the rows where the `timestamp_field`
+    is higher than the one provided minus the TTL
+    - For each row, join on the entity key and retrieve the `entity_row_unique_id` that has been
+    computed previously
+
+ The output of this CTE will contain all the necessary information and already filtered out most
+ of the data that is not relevant.
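+
+ For example (illustrative values): given an entity row with entity_timestamp
+ 10:00 and a feature view with ttl = 3600, feature rows stamped 09:10 and 09:50
+ both survive this filter, and step 3 below keeps only the 09:50 row.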
+*/ + +{{ featureview.name }}__subquery AS ( + SELECT + {{ featureview.timestamp_field }} as event_timestamp, + {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} + {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} + {% for feature in featureview.features %} + {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} + {% endfor %} + FROM {{ featureview.table_subquery }} + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' + {% if featureview.ttl == 0 %}{% else %} + AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}' + {% endif %} +), + +{{ featureview.name }}__base AS ( + SELECT + subquery.*, + entity_dataframe.entity_timestamp, + entity_dataframe.{{featureview.name}}__entity_row_unique_id + FROM {{ featureview.name }}__subquery AS subquery + INNER JOIN {{ featureview.name }}__entity_dataframe AS entity_dataframe + ON TRUE + AND subquery.event_timestamp <= entity_dataframe.entity_timestamp + + {% if featureview.ttl == 0 %}{% else %} + AND subquery.event_timestamp >= entity_dataframe.entity_timestamp - {{ featureview.ttl }} * interval '1' second + {% endif %} + + {% for entity in featureview.entities %} + AND subquery.{{ entity }} = entity_dataframe.{{ entity }} + {% endfor %} +), + +/* + 2. If the `created_timestamp_column` has been set, we need to + deduplicate the data first. This is done by calculating the + `MAX(created_at_timestamp)` for each event_timestamp. + We then join the data on the next CTE +*/ +{% if featureview.created_timestamp_column %} +{{ featureview.name }}__dedup AS ( + SELECT + {{featureview.name}}__entity_row_unique_id, + event_timestamp, + MAX(created_timestamp) as created_timestamp + FROM {{ featureview.name }}__base + GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp +), +{% endif %} + +/* + 3. The data has been filtered during the first CTE "*__base" + Thus we only need to compute the latest timestamp of each feature. +*/ +{{ featureview.name }}__latest AS ( + SELECT + event_timestamp, + {% if featureview.created_timestamp_column %}created_timestamp,{% endif %} + {{featureview.name}}__entity_row_unique_id + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY {{featureview.name}}__entity_row_unique_id + ORDER BY event_timestamp DESC{% if featureview.created_timestamp_column %},created_timestamp DESC{% endif %} + ) AS row_number + FROM {{ featureview.name }}__base + {% if featureview.created_timestamp_column %} + INNER JOIN {{ featureview.name }}__dedup + USING ({{featureview.name}}__entity_row_unique_id, event_timestamp, created_timestamp) + {% endif %} + ) + WHERE row_number = 1 +), + +/* + 4. Once we know the latest value of each feature for a given timestamp, + we can join again the data back to the original "base" dataset +*/ +{{ featureview.name }}__cleaned AS ( + SELECT base.* + FROM {{ featureview.name }}__base as base + INNER JOIN {{ featureview.name }}__latest + USING( + {{featureview.name}}__entity_row_unique_id, + event_timestamp + {% if featureview.created_timestamp_column %} + ,created_timestamp + {% endif %} + ) +){% if loop.last %}{% else %}, {% endif %} + + +{% endfor %} +/* + Joins the outputs of multiple time travel joins to a single table. 
+ The entity_dataframe dataset being our source of truth here. + */ + +SELECT {{ final_output_feature_names | join(', ')}} +FROM entity_dataframe +{% for featureview in featureviews %} +LEFT JOIN ( + SELECT + {{featureview.name}}__entity_row_unique_id + {% for feature in featureview.features %} + ,{% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %} + {% endfor %} + FROM {{ featureview.name }}__cleaned +) USING ({{featureview.name}}__entity_row_unique_id) +{% endfor %} +""" diff --git a/sdk/python/feast/infra/offline_stores/redshift_source.py b/sdk/python/feast/infra/offline_stores/redshift_source.py index 81fe35fc18..00af8c1abf 100644 --- a/sdk/python/feast/infra/offline_stores/redshift_source.py +++ b/sdk/python/feast/infra/offline_stores/redshift_source.py @@ -1,43 +1,128 @@ +import warnings from typing import Callable, Dict, Iterable, Optional, Tuple from feast import type_map from feast.data_source import DataSource from feast.errors import DataSourceNotFoundException, RedshiftCredentialsError from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage from feast.value_type import ValueType class RedshiftSource(DataSource): def __init__( self, + *, event_timestamp_column: Optional[str] = "", table: Optional[str] = None, + schema: Optional[str] = None, created_timestamp_column: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, - date_partition_column: Optional[str] = "", + date_partition_column: Optional[str] = None, query: Optional[str] = None, + name: Optional[str] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + database: Optional[str] = "", + timestamp_field: Optional[str] = "", ): - super().__init__( - event_timestamp_column, - created_timestamp_column, - field_mapping, - date_partition_column, + """ + Creates a RedshiftSource object. + + Args: + event_timestamp_column (optional): (Deprecated) Event timestamp column used for point in + time joins of feature values. + table (optional): Redshift table where the features are stored. + schema (optional): Redshift schema in which the table is located. + created_timestamp_column (optional): Timestamp column indicating when the + row was created, used for deduplicating rows. + field_mapping (optional): A dictionary mapping of column names in this data + source to column names in a feature table or view. + date_partition_column (deprecated): Timestamp column used for partitioning. + query (optional): The query to be executed to obtain the features. + name (optional): Name for the source. Defaults to the table_ref if not specified. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the redshift source, typically the email of the primary + maintainer. + database (optional): The Redshift database name. + timestamp_field (optional): Event timestamp field used for point in time + joins of feature values. + """ + # The default Redshift schema is named "public". 
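+        # As an illustrative sketch (values are hypothetical): a table-backed
+        # source, RedshiftSource(table="driver_stats"), falls back to the
+        # "public" schema, while a query-backed source such as
+        # RedshiftSource(query="SELECT * FROM driver_stats", name="driver_src")
+        # leaves the schema unset.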
+        _schema = "public" if table and not schema else schema
+        self.redshift_options = RedshiftOptions(
+            table=table, schema=_schema, query=query, database=database
         )
-        self._redshift_options = RedshiftOptions(table=table, query=query)
+        if table is None and query is None:
+            raise ValueError('No "table" or "query" argument provided.')
+        _name = name
+        if not _name:
+            if table:
+                _name = table
+            else:
+                warnings.warn(
+                    (
+                        f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) "
+                        f"or `table`: {self.query}"
+                    ),
+                    DeprecationWarning,
+                )
+        if date_partition_column:
+            warnings.warn(
+                (
+                    "The argument 'date_partition_column' is not supported for Redshift sources. "
+                    "It will be removed in Feast 0.21+"
+                ),
+                DeprecationWarning,
+            )
+
+        super().__init__(
+            name=_name if _name else "",
+            event_timestamp_column=event_timestamp_column,
+            created_timestamp_column=created_timestamp_column,
+            field_mapping=field_mapping,
+            description=description,
+            tags=tags,
+            owner=owner,
+            timestamp_field=timestamp_field,
+        )

     @staticmethod
     def from_proto(data_source: DataSourceProto):
+        """
+        Creates a RedshiftSource from a protobuf representation of a RedshiftSource.
+
+        Args:
+            data_source: A protobuf representation of a RedshiftSource
+
+        Returns:
+            A RedshiftSource object based on the data_source protobuf.
+        """
         return RedshiftSource(
+            name=data_source.name,
             field_mapping=dict(data_source.field_mapping),
             table=data_source.redshift_options.table,
-            event_timestamp_column=data_source.event_timestamp_column,
+            schema=data_source.redshift_options.schema,
+            timestamp_field=data_source.timestamp_field,
             created_timestamp_column=data_source.created_timestamp_column,
-            date_partition_column=data_source.date_partition_column,
             query=data_source.redshift_options.query,
+            description=data_source.description,
+            tags=dict(data_source.tags),
+            owner=data_source.owner,
+            database=data_source.redshift_options.database,
         )

+    # Note: Python requires redefining hash in child classes that override __eq__
+    def __hash__(self):
+        return super().__hash__()
+
     def __eq__(self, other):
         if not isinstance(other, RedshiftSource):
             raise TypeError(
@@ -45,59 +130,63 @@ def __eq__(self, other):
         )

         return (
-            self.redshift_options.table == other.redshift_options.table
+            super().__eq__(other)
+            and self.redshift_options.table == other.redshift_options.table
+            and self.redshift_options.schema == other.redshift_options.schema
             and self.redshift_options.query == other.redshift_options.query
-            and self.event_timestamp_column == other.event_timestamp_column
-            and self.created_timestamp_column == other.created_timestamp_column
-            and self.field_mapping == other.field_mapping
+            and self.redshift_options.database == other.redshift_options.database
         )

     @property
     def table(self):
-        return self._redshift_options.table
+        """Returns the table of this Redshift source."""
+        return self.redshift_options.table
+
+    @property
+    def schema(self):
+        """Returns the schema of this Redshift source."""
+        return self.redshift_options.schema

     @property
     def query(self):
-        return self._redshift_options.query
+        """Returns the Redshift query of this Redshift source."""
+        return self.redshift_options.query

     @property
-    def redshift_options(self):
-        """
-        Returns the Redshift options of this data source
-        """
-        return self._redshift_options
+    def database(self):
+        """Returns the Redshift database of this Redshift source."""
+        return self.redshift_options.database

-    @redshift_options.setter
-    def redshift_options(self, _redshift_options):
-        """
-        Sets the 
Redshift options of this data source + def to_proto(self) -> DataSourceProto: """ - self._redshift_options = _redshift_options + Converts a RedshiftSource object to its protobuf representation. - def to_proto(self) -> DataSourceProto: + Returns: + A DataSourceProto object. + """ data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.BATCH_REDSHIFT, field_mapping=self.field_mapping, redshift_options=self.redshift_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, + timestamp_field=self.timestamp_field, + created_timestamp_column=self.created_timestamp_column, ) - data_source_proto.event_timestamp_column = self.event_timestamp_column - data_source_proto.created_timestamp_column = self.created_timestamp_column - data_source_proto.date_partition_column = self.date_partition_column - return data_source_proto def validate(self, config: RepoConfig): # As long as the query gets successfully executed, or the table exists, # the data source is validated. We don't need the results though. - # TODO: uncomment this - # self.get_table_column_names_and_types(config) - print("Validate", self.get_table_column_names_and_types(config)) + self.get_table_column_names_and_types(config) def get_table_query_string(self) -> str: - """Returns a string that can directly be used to reference this table in SQL""" + """Returns a string that can directly be used to reference this table in SQL.""" if self.table: - return f'"{self.table}"' + return f'"{self.schema}"."{self.table}"' else: return f"({self.query})" @@ -108,6 +197,12 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: def get_table_column_names_and_types( self, config: RepoConfig ) -> Iterable[Tuple[str, str]]: + """ + Returns a mapping of column names to types for this Redshift source. + + Args: + config: A RepoConfig describing the feature repo + """ from botocore.exceptions import ClientError from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig @@ -116,14 +211,18 @@ def get_table_column_names_and_types( assert isinstance(config.offline_store, RedshiftOfflineStoreConfig) client = aws_utils.get_redshift_data_client(config.offline_store.region) - - if self.table is not None: + if self.table: try: table = client.describe_table( ClusterIdentifier=config.offline_store.cluster_id, - Database=config.offline_store.database, + Database=( + self.database + if self.database + else config.offline_store.database + ), DbUser=config.offline_store.user, Table=self.table, + Schema=self.schema, ) except ClientError as e: if e.response["Error"]["Code"] == "ValidationException": @@ -139,7 +238,7 @@ def get_table_column_names_and_types( statement_id = aws_utils.execute_redshift_statement( client, config.offline_store.cluster_id, - config.offline_store.database, + self.database if self.database else config.offline_store.database, config.offline_store.user, f"SELECT * FROM ({self.query}) LIMIT 1", ) @@ -152,55 +251,37 @@ def get_table_column_names_and_types( class RedshiftOptions: """ - DataSource Redshift options used to source features from Redshift query + Configuration options for a Redshift data source. 
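+
+    Example (an illustrative sketch; the values are hypothetical):
+
+        options = RedshiftOptions(
+            table="driver_stats", schema="public", query=None, database="feast"
+        )
+        assert RedshiftOptions.from_proto(options.to_proto()).table == "driver_stats"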
""" - def __init__(self, table: Optional[str], query: Optional[str]): - self._table = table - self._query = query - - @property - def query(self): - """ - Returns the Redshift SQL query referenced by this source - """ - return self._query - - @query.setter - def query(self, query): - """ - Sets the Redshift SQL query referenced by this source - """ - self._query = query - - @property - def table(self): - """ - Returns the table name of this Redshift table - """ - return self._table - - @table.setter - def table(self, table_name): - """ - Sets the table ref of this Redshift table - """ - self._table = table_name + def __init__( + self, + table: Optional[str], + schema: Optional[str], + query: Optional[str], + database: Optional[str], + ): + self.table = table or "" + self.schema = schema or "" + self.query = query or "" + self.database = database or "" @classmethod def from_proto(cls, redshift_options_proto: DataSourceProto.RedshiftOptions): """ - Creates a RedshiftOptions from a protobuf representation of a Redshift option + Creates a RedshiftOptions from a protobuf representation of a Redshift option. Args: redshift_options_proto: A protobuf representation of a DataSource Returns: - Returns a RedshiftOptions object based on the redshift_options protobuf + A RedshiftOptions object based on the redshift_options protobuf. """ - redshift_options = cls( - table=redshift_options_proto.table, query=redshift_options_proto.query, + table=redshift_options_proto.table, + schema=redshift_options_proto.schema, + query=redshift_options_proto.query, + database=redshift_options_proto.database, ) return redshift_options @@ -210,11 +291,39 @@ def to_proto(self) -> DataSourceProto.RedshiftOptions: Converts an RedshiftOptionsProto object to its protobuf representation. Returns: - RedshiftOptionsProto protobuf + A RedshiftOptionsProto protobuf. 
""" - redshift_options_proto = DataSourceProto.RedshiftOptions( - table=self.table, query=self.query, + table=self.table, + schema=self.schema, + query=self.query, + database=self.database, ) return redshift_options_proto + + +class SavedDatasetRedshiftStorage(SavedDatasetStorage): + _proto_attr_name = "redshift_storage" + + redshift_options: RedshiftOptions + + def __init__(self, table_ref: str): + self.redshift_options = RedshiftOptions( + table=table_ref, schema=None, query=None, database=None + ) + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: + + return SavedDatasetRedshiftStorage( + table_ref=RedshiftOptions.from_proto(storage_proto.redshift_storage).table + ) + + def to_proto(self) -> SavedDatasetStorageProto: + return SavedDatasetStorageProto( + redshift_storage=self.redshift_options.to_proto() + ) + + def to_data_source(self) -> DataSource: + return RedshiftSource(table=self.redshift_options.table) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py new file mode 100644 index 0000000000..a07f7a57c6 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -0,0 +1,636 @@ +import contextlib +import os +from datetime import datetime +from pathlib import Path +from typing import ( + Callable, + ContextManager, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, + cast, +) + +import numpy as np +import pandas as pd +import pyarrow as pa +from pydantic import Field +from pydantic.typing import Literal +from pytz import utc + +from feast import OnDemandFeatureView +from feast.data_source import DataSource +from feast.errors import InvalidEntityType +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.infra.offline_stores.snowflake_source import ( + SavedDatasetSnowflakeStorage, + SnowflakeSource, +) +from feast.infra.utils.snowflake_utils import ( + execute_snowflake_statement, + get_snowflake_conn, + write_pandas, +) +from feast.registry import Registry +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.usage import log_exceptions_and_usage + +try: + from snowflake.connector import SnowflakeConnection +except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("snowflake", str(e)) + + +class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): + """Offline store config for Snowflake""" + + type: Literal["snowflake.offline"] = "snowflake.offline" + """ Offline store type selector""" + + config_path: Optional[str] = ( + Path(os.environ["HOME"]) / ".snowsql/config" + ).__str__() + """ Snowflake config path -- absolute path required (Cant use ~)""" + + account: Optional[str] = None + """ Snowflake deployment identifier -- drop .snowflakecomputing.com""" + + user: Optional[str] = None + """ Snowflake user name """ + + password: Optional[str] = None + """ Snowflake password """ + + role: Optional[str] = None + """ Snowflake role name""" + + warehouse: Optional[str] = None + """ Snowflake warehouse name """ + + database: Optional[str] = None + """ Snowflake database name """ + + schema_: Optional[str] = Field(None, alias="schema") + """ Snowflake schema name """ + + class Config: + allow_population_by_field_name = 
True + + +class SnowflakeOfflineStore(OfflineStore): + @staticmethod + @log_exceptions_and_usage(offline_store="snowflake") + def pull_latest_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + created_timestamp_column: Optional[str], + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, SnowflakeSource) + assert isinstance(config.offline_store, SnowflakeOfflineStoreConfig) + + from_expression = ( + data_source.get_table_query_string() + ) # returns schema.table as a string + + if join_key_columns: + partition_by_join_key_string = '"' + '", "'.join(join_key_columns) + '"' + partition_by_join_key_string = ( + "PARTITION BY " + partition_by_join_key_string + ) + else: + partition_by_join_key_string = "" + + timestamp_columns = [timestamp_field] + if created_timestamp_column: + timestamp_columns.append(created_timestamp_column) + + timestamp_desc_string = '"' + '" DESC, "'.join(timestamp_columns) + '" DESC' + field_string = ( + '"' + + '", "'.join(join_key_columns + feature_name_columns + timestamp_columns) + + '"' + ) + + if data_source.snowflake_options.warehouse: + config.offline_store.warehouse = data_source.snowflake_options.warehouse + + snowflake_conn = get_snowflake_conn(config.offline_store) + + query = f""" + SELECT + {field_string} + {f''', TRIM({repr(DUMMY_ENTITY_VAL)}::VARIANT,'"') AS "{DUMMY_ENTITY_ID}"''' if not join_key_columns else ""} + FROM ( + SELECT {field_string}, + ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS "_feast_row" + FROM {from_expression} + WHERE "{timestamp_field}" BETWEEN TO_TIMESTAMP_NTZ({start_date.timestamp()}) AND TO_TIMESTAMP_NTZ({end_date.timestamp()}) + ) + WHERE "_feast_row" = 1 + """ + + return SnowflakeRetrievalJob( + query=query, + snowflake_conn=snowflake_conn, + config=config, + full_feature_names=False, + on_demand_feature_views=None, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="snowflake") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, SnowflakeSource) + from_expression = data_source.get_table_query_string() + + field_string = ( + '"' + + '", "'.join(join_key_columns + feature_name_columns + [timestamp_field]) + + '"' + ) + + if data_source.snowflake_options.warehouse: + config.offline_store.warehouse = data_source.snowflake_options.warehouse + + snowflake_conn = get_snowflake_conn(config.offline_store) + + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + + query = f""" + SELECT {field_string} + FROM {from_expression} + WHERE "{timestamp_field}" BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + """ + + return SnowflakeRetrievalJob( + query=query, + snowflake_conn=snowflake_conn, + config=config, + full_feature_names=False, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="snowflake") + def get_historical_features( + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pd.DataFrame, str], + registry: Registry, + project: str, + full_feature_names: bool = False, + ) -> RetrievalJob: + assert isinstance(config.offline_store, SnowflakeOfflineStoreConfig) + + snowflake_conn = 
get_snowflake_conn(config.offline_store) + + entity_schema = _get_entity_schema(entity_df, snowflake_conn, config) + + entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema + ) + + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, entity_df_event_timestamp_col, snowflake_conn, + ) + + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + + table_name = offline_utils.get_temp_entity_table_name() + + _upload_entity_df(entity_df, snowflake_conn, config, table_name) + + expected_join_keys = offline_utils.get_expected_join_keys( + project, feature_views, registry + ) + + offline_utils.assert_expected_columns_in_entity_df( + entity_schema, expected_join_keys, entity_df_event_timestamp_col + ) + + # Build a query context containing all information required to template the Snowflake SQL query + query_context = offline_utils.get_feature_view_query_context( + feature_refs, + feature_views, + registry, + project, + entity_df_event_timestamp_range, + ) + + query_context = _fix_entity_selections_identifiers(query_context) + + # Generate the Snowflake SQL query from the query context + query = offline_utils.build_point_in_time_query( + query_context, + left_table_query_string=table_name, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + + yield query + + return SnowflakeRetrievalJob( + query=query_generator, + snowflake_conn=snowflake_conn, + config=config, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + metadata=RetrievalMetadata( + features=feature_refs, + keys=list(entity_schema.keys() - {entity_df_event_timestamp_col}), + min_event_timestamp=entity_df_event_timestamp_range[0], + max_event_timestamp=entity_df_event_timestamp_range[1], + ), + ) + + +class SnowflakeRetrievalJob(RetrievalJob): + def __init__( + self, + query: Union[str, Callable[[], ContextManager[str]]], + snowflake_conn: SnowflakeConnection, + config: RepoConfig, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None, + metadata: Optional[RetrievalMetadata] = None, + ): + + if not isinstance(query, str): + self._query_generator = query + else: + + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + assert isinstance(query, str) + yield query + + self._query_generator = query_generator + + self.snowflake_conn = snowflake_conn + self.config = config + self._full_feature_names = full_feature_names + self._on_demand_feature_views = ( + on_demand_feature_views if on_demand_feature_views else [] + ) + self._metadata = metadata + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views + + def _to_df_internal(self) -> pd.DataFrame: + with self._query_generator() as query: + + df = execute_snowflake_statement( + self.snowflake_conn, query + ).fetch_pandas_all() + + return df + + def _to_arrow_internal(self) -> pa.Table: + with self._query_generator() as query: + + pa_table = execute_snowflake_statement( + self.snowflake_conn, query + ).fetch_arrow_all() + + if pa_table: + + return pa_table + else: + empty_result = execute_snowflake_statement(self.snowflake_conn, query) + + 
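+                # The query returned no rows (fetch_arrow_all() gave None), so
+                # build an empty pyarrow Table whose column names come from the
+                # cursor metadata in `description`.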
return pa.Table.from_pandas( + pd.DataFrame(columns=[md.name for md in empty_result.description]) + ) + + def to_snowflake(self, table_name: str) -> None: + """Save dataset as a new Snowflake table""" + if self.on_demand_feature_views is not None: + transformed_df = self.to_df() + + write_pandas( + self.snowflake_conn, transformed_df, table_name, auto_create_table=True + ) + + return None + + with self._query_generator() as query: + query = f'CREATE TABLE IF NOT EXISTS "{table_name}" AS ({query});\n' + + execute_snowflake_statement(self.snowflake_conn, query) + + def to_sql(self) -> str: + """ + Returns the SQL query that will be executed in Snowflake to build the historical feature table. + """ + with self._query_generator() as query: + return query + + def to_arrow_chunks(self, arrow_options: Optional[Dict] = None) -> Optional[List]: + with self._query_generator() as query: + + arrow_batches = execute_snowflake_statement( + self.snowflake_conn, query + ).get_result_batches() + + return arrow_batches + + def persist(self, storage: SavedDatasetStorage): + assert isinstance(storage, SavedDatasetSnowflakeStorage) + self.to_snowflake(table_name=storage.snowflake_options.table) + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata + + +def _get_entity_schema( + entity_df: Union[pd.DataFrame, str], + snowflake_conn: SnowflakeConnection, + config: RepoConfig, +) -> Dict[str, np.dtype]: + + if isinstance(entity_df, pd.DataFrame): + + return dict(zip(entity_df.columns, entity_df.dtypes)) + + else: + + query = f"SELECT * FROM ({entity_df}) LIMIT 1" + limited_entity_df = execute_snowflake_statement( + snowflake_conn, query + ).fetch_pandas_all() + + return dict(zip(limited_entity_df.columns, limited_entity_df.dtypes)) + + +def _upload_entity_df( + entity_df: Union[pd.DataFrame, str], + snowflake_conn: SnowflakeConnection, + config: RepoConfig, + table_name: str, +) -> None: + + if isinstance(entity_df, pd.DataFrame): + # Write the data from the DataFrame to the table + write_pandas( + snowflake_conn, + entity_df, + table_name, + auto_create_table=True, + create_temp_table=True, + ) + + return None + elif isinstance(entity_df, str): + # If the entity_df is a string (SQL query), create a Snowflake table out of it, + query = f'CREATE TEMPORARY TABLE "{table_name}" AS ({entity_df})' + execute_snowflake_statement(snowflake_conn, query) + + return None + else: + raise InvalidEntityType(type(entity_df)) + + +def _fix_entity_selections_identifiers(query_context) -> list: + + for i, qc in enumerate(query_context): + for j, es in enumerate(qc.entity_selections): + query_context[i].entity_selections[j] = f'"{es}"'.replace(" AS ", '" AS "') + + return query_context + + +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, + snowflake_conn: SnowflakeConnection, +) -> Tuple[datetime, datetime]: + if isinstance(entity_df, pd.DataFrame): + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min().to_pydatetime(), + entity_df_event_timestamp.max().to_pydatetime(), + ) + elif isinstance(entity_df, str): + # If the entity_df is a string (SQL query), determine range + # from table + query = f'SELECT MIN("{entity_df_event_timestamp_col}") AS "min_value", 
MAX("{entity_df_event_timestamp_col}") AS "max_value" FROM ({entity_df})' + results = execute_snowflake_statement(snowflake_conn, query).fetchall() + + entity_df_event_timestamp_range = cast(Tuple[datetime, datetime], results[0]) + else: + raise InvalidEntityType(type(entity_df)) + + return entity_df_event_timestamp_range + + +MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ +/* + Compute a deterministic hash for the `left_table_query_string` that will be used throughout + all the logic as the field to GROUP BY the data +*/ +WITH "entity_dataframe" AS ( + SELECT *, + "{{entity_df_event_timestamp_col}}" AS "entity_timestamp" + {% for featureview in featureviews %} + {% if featureview.entities %} + ,( + {% for entity in featureview.entities %} + CAST("{{entity}}" AS VARCHAR) || + {% endfor %} + CAST("{{entity_df_event_timestamp_col}}" AS VARCHAR) + ) AS "{{featureview.name}}__entity_row_unique_id" + {% else %} + ,CAST("{{entity_df_event_timestamp_col}}" AS VARCHAR) AS "{{featureview.name}}__entity_row_unique_id" + {% endif %} + {% endfor %} + FROM "{{ left_table_query_string }}" +), + +{% for featureview in featureviews %} + +"{{ featureview.name }}__entity_dataframe" AS ( + SELECT + {{ featureview.entities | map('tojson') | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} + "entity_timestamp", + "{{featureview.name}}__entity_row_unique_id" + FROM "entity_dataframe" + GROUP BY + {{ featureview.entities | map('tojson') | join(', ')}}{% if featureview.entities %},{% else %}{% endif %} + "entity_timestamp", + "{{featureview.name}}__entity_row_unique_id" +), + +/* + This query template performs the point-in-time correctness join for a single feature set table + to the provided entity table. + + 1. We first join the current feature_view to the entity dataframe that has been passed. + This JOIN has the following logic: + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` + is less than the one provided in the entity dataframe + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` + is higher the the one provided minus the TTL + - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been + computed previously + + The output of this CTE will contain all the necessary information and already filtered out most + of the data that is not relevant. 
+*/ + +"{{ featureview.name }}__subquery" AS ( + SELECT + "{{ featureview.timestamp_field }}" as "event_timestamp", + {{'"' ~ featureview.created_timestamp_column ~ '" as "created_timestamp",' if featureview.created_timestamp_column else '' }} + {{featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} + {% for feature in featureview.features %} + "{{ feature }}" as {% if full_feature_names %}"{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}"{% else %}"{{ featureview.field_mapping.get(feature, feature) }}"{% endif %}{% if loop.last %}{% else %}, {% endif %} + {% endfor %} + FROM {{ featureview.table_subquery }} + WHERE "{{ featureview.timestamp_field }}" <= '{{ featureview.max_event_timestamp }}' + {% if featureview.ttl == 0 %}{% else %} + AND "{{ featureview.timestamp_field }}" >= '{{ featureview.min_event_timestamp }}' + {% endif %} +), + +"{{ featureview.name }}__base" AS ( + SELECT + "subquery".*, + "entity_dataframe"."entity_timestamp", + "entity_dataframe"."{{featureview.name}}__entity_row_unique_id" + FROM "{{ featureview.name }}__subquery" AS "subquery" + INNER JOIN "{{ featureview.name }}__entity_dataframe" AS "entity_dataframe" + ON TRUE + AND "subquery"."event_timestamp" <= "entity_dataframe"."entity_timestamp" + + {% if featureview.ttl == 0 %}{% else %} + AND "subquery"."event_timestamp" >= TIMESTAMPADD(second,-{{ featureview.ttl }},"entity_dataframe"."entity_timestamp") + {% endif %} + + {% for entity in featureview.entities %} + AND "subquery"."{{ entity }}" = "entity_dataframe"."{{ entity }}" + {% endfor %} +), + +/* + 2. If the `created_timestamp_column` has been set, we need to + deduplicate the data first. This is done by calculating the + `MAX(created_at_timestamp)` for each event_timestamp. + We then join the data on the next CTE +*/ +{% if featureview.created_timestamp_column %} +"{{ featureview.name }}__dedup" AS ( + SELECT + "{{featureview.name}}__entity_row_unique_id", + "event_timestamp", + MAX("created_timestamp") AS "created_timestamp" + FROM "{{ featureview.name }}__base" + GROUP BY "{{featureview.name}}__entity_row_unique_id", "event_timestamp" +), +{% endif %} + +/* + 3. The data has been filtered during the first CTE "*__base" + Thus we only need to compute the latest timestamp of each feature. +*/ +"{{ featureview.name }}__latest" AS ( + SELECT + "event_timestamp", + {% if featureview.created_timestamp_column %}"created_timestamp",{% endif %} + "{{featureview.name}}__entity_row_unique_id" + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY "{{featureview.name}}__entity_row_unique_id" + ORDER BY "event_timestamp" DESC{% if featureview.created_timestamp_column %},"created_timestamp" DESC{% endif %} + ) AS "row_number" + FROM "{{ featureview.name }}__base" + {% if featureview.created_timestamp_column %} + INNER JOIN "{{ featureview.name }}__dedup" + USING ("{{featureview.name}}__entity_row_unique_id", "event_timestamp", "created_timestamp") + {% endif %} + ) + WHERE "row_number" = 1 +), + +/* + 4. 
Once we know the latest value of each feature for a given timestamp, + we can join again the data back to the original "base" dataset +*/ +"{{ featureview.name }}__cleaned" AS ( + SELECT "base".* + FROM "{{ featureview.name }}__base" AS "base" + INNER JOIN "{{ featureview.name }}__latest" + USING( + "{{featureview.name}}__entity_row_unique_id", + "event_timestamp" + {% if featureview.created_timestamp_column %} + ,"created_timestamp" + {% endif %} + ) +){% if loop.last %}{% else %}, {% endif %} + + +{% endfor %} +/* + Joins the outputs of multiple time travel joins to a single table. + The entity_dataframe dataset being our source of truth here. + */ + +SELECT "{{ final_output_feature_names | join('", "')}}" +FROM "entity_dataframe" +{% for featureview in featureviews %} +LEFT JOIN ( + SELECT + "{{featureview.name}}__entity_row_unique_id" + {% for feature in featureview.features %} + ,{% if full_feature_names %}"{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}"{% else %}"{{ featureview.field_mapping.get(feature, feature) }}"{% endif %} + {% endfor %} + FROM "{{ featureview.name }}__cleaned" +) "{{ featureview.name }}__cleaned" USING ("{{featureview.name}}__entity_row_unique_id") +{% endfor %} +""" diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py new file mode 100644 index 0000000000..904fc48043 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -0,0 +1,331 @@ +import warnings +from typing import Callable, Dict, Iterable, Optional, Tuple + +from feast import type_map +from feast.data_source import DataSource +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) +from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.value_type import ValueType + + +class SnowflakeSource(DataSource): + def __init__( + self, + *, + database: Optional[str] = None, + warehouse: Optional[str] = None, + schema: Optional[str] = None, + table: Optional[str] = None, + query: Optional[str] = None, + event_timestamp_column: Optional[str] = "", + date_partition_column: Optional[str] = None, + created_timestamp_column: Optional[str] = "", + field_mapping: Optional[Dict[str, str]] = None, + name: Optional[str] = None, + description: Optional[str] = "", + tags: Optional[Dict[str, str]] = None, + owner: Optional[str] = "", + timestamp_field: Optional[str] = "", + ): + """ + Creates a SnowflakeSource object. + + Args: + database (optional): Snowflake database where the features are stored. + warehouse (optional): Snowflake warehouse where the database is stored. + schema (optional): Snowflake schema in which the table is located. + table (optional): Snowflake table where the features are stored. + event_timestamp_column (optional): (Deprecated) Event timestamp column used for point in + time joins of feature values. + query (optional): The query to be executed to obtain the features. + created_timestamp_column (optional): Timestamp column indicating when the + row was created, used for deduplicating rows. + field_mapping (optional): A dictionary mapping of column names in this data + source to column names in a feature table or view. + date_partition_column (deprecated): Timestamp column used for partitioning. + name (optional): Name for the source. Defaults to the table if not specified. 
+            description (optional): A human-readable description.
+            tags (optional): A dictionary of key-value pairs to store arbitrary metadata.
+            owner (optional): The owner of the snowflake source, typically the email of the primary
+                maintainer.
+        """
+        if table is None and query is None:
+            raise ValueError('No "table" or "query" argument provided.')
+
+        # The default Snowflake schema is named "PUBLIC".
+        _schema = "PUBLIC" if (database and table and not schema) else schema
+
+        self.snowflake_options = SnowflakeOptions(
+            database=database,
+            schema=_schema,
+            table=table,
+            query=query,
+            warehouse=warehouse,
+        )
+
+        # If no name, use the table as the default name
+        _name = name
+        if not _name:
+            if table:
+                _name = table
+            else:
+                warnings.warn(
+                    (
+                        f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) "
+                        f"or `table`: {self.query}"
+                    ),
+                    DeprecationWarning,
+                )
+
+        if date_partition_column:
+            warnings.warn(
+                (
+                    "The argument 'date_partition_column' is not supported for Snowflake sources. "
+                    "It will be removed in Feast 0.21+"
+                ),
+                DeprecationWarning,
+            )
+
+        super().__init__(
+            name=_name if _name else "",
+            event_timestamp_column=event_timestamp_column,
+            created_timestamp_column=created_timestamp_column,
+            field_mapping=field_mapping,
+            description=description,
+            tags=tags,
+            owner=owner,
+            timestamp_field=timestamp_field,
+        )
+
+    @staticmethod
+    def from_proto(data_source: DataSourceProto):
+        """
+        Creates a SnowflakeSource from a protobuf representation of a SnowflakeSource.
+
+        Args:
+            data_source: A protobuf representation of a SnowflakeSource
+
+        Returns:
+            A SnowflakeSource object based on the data_source protobuf.
+        """
+        return SnowflakeSource(
+            name=data_source.name,
+            field_mapping=dict(data_source.field_mapping),
+            database=data_source.snowflake_options.database,
+            schema=data_source.snowflake_options.schema,
+            table=data_source.snowflake_options.table,
+            warehouse=data_source.snowflake_options.warehouse,
+            timestamp_field=data_source.timestamp_field,
+            created_timestamp_column=data_source.created_timestamp_column,
+            query=data_source.snowflake_options.query,
+            description=data_source.description,
+            tags=dict(data_source.tags),
+            owner=data_source.owner,
+        )
+
+    # Note: Python requires redefining hash in child classes that override __eq__
+    def __hash__(self):
+        return super().__hash__()
+
+    def __eq__(self, other):
+        if not isinstance(other, SnowflakeSource):
+            raise TypeError(
+                "Comparisons should only involve SnowflakeSource class objects."
+            )
+
+        return (
+            super().__eq__(other)
+            and self.database == other.database
+            and self.schema == other.schema
+            and self.table == other.table
+            and self.query == other.query
+            and self.warehouse == other.warehouse
+        )
+
+    @property
+    def database(self):
+        """Returns the database of this snowflake source."""
+        return self.snowflake_options.database
+
+    @property
+    def schema(self):
+        """Returns the schema of this snowflake source."""
+        return self.snowflake_options.schema
+
+    @property
+    def table(self):
+        """Returns the table of this snowflake source."""
+        return self.snowflake_options.table
+
+    @property
+    def query(self):
+        """Returns the query of this snowflake source."""
+        return self.snowflake_options.query
+
+    @property
+    def warehouse(self):
+        """Returns the warehouse of this snowflake source."""
+        return self.snowflake_options.warehouse
+
+    def to_proto(self) -> DataSourceProto:
+        """
+        Converts a SnowflakeSource object to its protobuf representation.
+ + Returns: + A DataSourceProto object. + """ + data_source_proto = DataSourceProto( + name=self.name, + type=DataSourceProto.BATCH_SNOWFLAKE, + field_mapping=self.field_mapping, + snowflake_options=self.snowflake_options.to_proto(), + description=self.description, + tags=self.tags, + owner=self.owner, + ) + + data_source_proto.timestamp_field = self.timestamp_field + data_source_proto.created_timestamp_column = self.created_timestamp_column + + return data_source_proto + + def validate(self, config: RepoConfig): + # As long as the query gets successfully executed, or the table exists, + # the data source is validated. We don't need the results though. + self.get_table_column_names_and_types(config) + + def get_table_query_string(self) -> str: + """Returns a string that can directly be used to reference this table in SQL.""" + if self.database and self.table: + return f'"{self.database}"."{self.schema}"."{self.table}"' + elif self.table: + return f'"{self.table}"' + else: + return f"({self.query})" + + @staticmethod + def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: + return type_map.snowflake_python_type_to_feast_value_type + + def get_table_column_names_and_types( + self, config: RepoConfig + ) -> Iterable[Tuple[str, str]]: + """ + Returns a mapping of column names to types for this snowflake source. + + Args: + config: A RepoConfig describing the feature repo + """ + + from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig + from feast.infra.utils.snowflake_utils import ( + execute_snowflake_statement, + get_snowflake_conn, + ) + + assert isinstance(config.offline_store, SnowflakeOfflineStoreConfig) + + snowflake_conn = get_snowflake_conn(config.offline_store) + + if self.database and self.table: + query = f'SELECT * FROM "{self.database}"."{self.schema}"."{self.table}" LIMIT 1' + elif self.table: + query = f'SELECT * FROM "{self.table}" LIMIT 1' + else: + query = f"SELECT * FROM ({self.query}) LIMIT 1" + + result = execute_snowflake_statement(snowflake_conn, query).fetch_pandas_all() + + if not result.empty: + metadata = result.dtypes.apply(str) + return list(zip(metadata.index, metadata)) + else: + raise ValueError("The following source:\n" + query + "\n ... is empty") + + +class SnowflakeOptions: + """ + Configuration options for a Snowflake data source. + """ + + def __init__( + self, + database: Optional[str], + schema: Optional[str], + table: Optional[str], + query: Optional[str], + warehouse: Optional[str], + ): + self.database = database or "" + self.schema = schema or "" + self.table = table or "" + self.query = query or "" + self.warehouse = warehouse or "" + + @classmethod + def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions): + """ + Creates a SnowflakeOptions from a protobuf representation of a snowflake option. + + Args: + snowflake_options_proto: A protobuf representation of a DataSource + + Returns: + A SnowflakeOptions object based on the snowflake_options protobuf. + """ + snowflake_options = cls( + database=snowflake_options_proto.database, + schema=snowflake_options_proto.schema, + table=snowflake_options_proto.table, + query=snowflake_options_proto.query, + warehouse=snowflake_options_proto.warehouse, + ) + + return snowflake_options + + def to_proto(self) -> DataSourceProto.SnowflakeOptions: + """ + Converts an SnowflakeOptionsProto object to its protobuf representation. + + Returns: + A SnowflakeOptionsProto protobuf. 
+ """ + snowflake_options_proto = DataSourceProto.SnowflakeOptions( + database=self.database, + schema=self.schema, + table=self.table, + query=self.query, + warehouse=self.warehouse, + ) + + return snowflake_options_proto + + +class SavedDatasetSnowflakeStorage(SavedDatasetStorage): + _proto_attr_name = "snowflake_storage" + + snowflake_options: SnowflakeOptions + + def __init__(self, table_ref: str): + self.snowflake_options = SnowflakeOptions( + database=None, schema=None, table=table_ref, query=None, warehouse=None + ) + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage: + + return SavedDatasetSnowflakeStorage( + table_ref=SnowflakeOptions.from_proto(storage_proto.snowflake_storage).table + ) + + def to_proto(self) -> SavedDatasetStorageProto: + return SavedDatasetStorageProto( + snowflake_storage=self.snowflake_options.to_proto() + ) + + def to_data_source(self) -> DataSource: + return SnowflakeSource(table=self.snowflake_options.table) diff --git a/sdk/python/feast/infra/online_stores/datastore.py b/sdk/python/feast/infra/online_stores/datastore.py index e5328a2725..e975ce138c 100644 --- a/sdk/python/feast/infra/online_stores/datastore.py +++ b/sdk/python/feast/infra/online_stores/datastore.py @@ -12,26 +12,39 @@ # See the License for the specific language governing permissions and # limitations under the License. import itertools +import logging from datetime import datetime from multiprocessing.pool import ThreadPool -from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple, Union +from queue import Queue +from threading import Lock, Thread +from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple from pydantic import PositiveInt, StrictStr from pydantic.typing import Literal -from feast import Entity, FeatureTable, utils +from feast import Entity, utils +from feast.errors import FeastProviderLoginError from feast.feature_view import FeatureView +from feast.infra.infra_object import DATASTORE_INFRA_OBJECT_CLASS_TYPE, InfraObject from feast.infra.online_stores.helpers import compute_entity_id from feast.infra.online_stores.online_store import OnlineStore +from feast.protos.feast.core.DatastoreTable_pb2 import ( + DatastoreTable as DatastoreTableProto, +) +from feast.protos.feast.core.InfraObject_pb2 import InfraObject as InfraObjectProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.usage import log_exceptions_and_usage, tracing_span + +LOGGER = logging.getLogger(__name__) try: from google.auth.exceptions import DefaultCredentialsError from google.cloud import datastore + from google.cloud.datastore.client import Key except ImportError as e: - from feast.errors import FeastExtrasDependencyImportError, FeastProviderLoginError + from feast.errors import FeastExtrasDependencyImportError raise FeastExtrasDependencyImportError("gcp", str(e)) @@ -42,7 +55,7 @@ class DatastoreOnlineStoreConfig(FeastConfigBaseModel): - """ Online store config for GCP Datastore """ + """Online store config for GCP Datastore""" type: Literal["datastore"] = "datastore" """ Online store type selector""" @@ -68,17 +81,16 @@ class DatastoreOnlineStore(OnlineStore): _client: Optional[datastore.Client] = None + @log_exceptions_and_usage(online_store="datastore") def update( self, config: RepoConfig, - tables_to_delete: Sequence[Union[FeatureTable, 
FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, ): - """ - """ online_config = config.online_store assert isinstance(online_config, DatastoreOnlineStoreConfig) client = self._get_client(online_config) @@ -104,12 +116,9 @@ def update( def teardown( self, config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], + tables: Sequence[FeatureView], entities: Sequence[Entity], ): - """ - There's currently no teardown done for Datastore. - """ online_config = config.online_store assert isinstance(online_config, DatastoreOnlineStoreConfig) client = self._get_client(online_config) @@ -125,24 +134,17 @@ def teardown( client.delete(key) def _get_client(self, online_config: DatastoreOnlineStoreConfig): - if not self._client: - try: - self._client = datastore.Client( - project=online_config.project_id, namespace=online_config.namespace, - ) - except DefaultCredentialsError as e: - raise FeastProviderLoginError( - str(e) - + '\nIt may be necessary to run "gcloud auth application-default login" if you would like to use your ' - "local Google Cloud account " - ) + self._client = _initialize_client( + online_config.project_id, online_config.namespace + ) return self._client + @log_exceptions_and_usage(online_store="datastore") def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -157,11 +159,13 @@ def online_write_batch( write_batch_size = online_config.write_batch_size feast_project = config.project - pool = ThreadPool(processes=write_concurrency) - pool.map( - lambda b: self._write_minibatch(client, feast_project, table, b, progress), - self._to_minibatches(data, batch_size=write_batch_size), - ) + with ThreadPool(processes=write_concurrency) as pool: + pool.map( + lambda b: self._write_minibatch( + client, feast_project, table, b, progress + ), + self._to_minibatches(data, batch_size=write_batch_size), + ) @staticmethod def _to_minibatches(data: ProtoBatch, batch_size) -> Iterator[ProtoBatch]: @@ -182,7 +186,7 @@ def _to_minibatches(data: ProtoBatch, batch_size) -> Iterator[ProtoBatch]: def _write_minibatch( client, project: str, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: Sequence[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -200,18 +204,18 @@ def _write_minibatch( key=key, exclude_from_indexes=("created_ts", "event_ts", "values") ) - entity.update( - dict( - key=entity_key.SerializeToString(), - values={k: v.SerializeToString() for k, v in features.items()}, - event_ts=utils.make_tzaware(timestamp), - created_ts=( - utils.make_tzaware(created_ts) - if created_ts is not None - else None - ), - ) + content_entity = datastore.Entity( + exclude_from_indexes=tuple(features.keys()) + ) + for k, v in features.items(): + content_entity[k] = v.SerializeToString() + entity["key"] = entity_key.SerializeToString() + entity["values"] = content_entity + entity["event_ts"] = utils.make_tzaware(timestamp) + entity["created_ts"] = ( + utils.make_tzaware(created_ts) if created_ts is not None else None ) + entities.append(entity) with client.transaction(): client.put_multi(entities) @@ -219,10 +223,11 @@ def _write_minibatch( if progress: progress(len(entities)) + 
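The write path above first splits incoming rows into fixed-size minibatches (see _to_minibatches) and then fans the batches out over a ThreadPool. A minimal standalone sketch of that batching step, independent of Datastore (the names here are illustrative, not part of the diff):

from itertools import islice
from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")

def to_minibatches(data: Iterable[T], batch_size: int) -> Iterator[List[T]]:
    # Yield successive slices of at most `batch_size` items; the final
    # slice may be shorter.
    it = iter(data)
    while True:
        batch = list(islice(it, batch_size))
        if not batch:
            return
        yield batch

Each yielded batch can then be handed to pool.map alongside a per-batch write function, which is what the diff does with write_batch_size-sized batches.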
@log_exceptions_and_usage(online_store="datastore") def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: @@ -233,14 +238,23 @@ def online_read( feast_project = config.project + keys: List[Key] = [] result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] for entity_key in entity_keys: document_id = compute_entity_id(entity_key) key = client.key( "Project", feast_project, "Table", table.name, "Row", document_id ) - value = client.get(key) - if value is not None: + keys.append(key) + + # NOTE: get_multi doesn't return values in the same order as the keys in the request. + # Also, len(values) can be less than len(keys) in the case of missing values. + with tracing_span(name="remote_call"): + values = client.get_multi(keys) + values_dict = {v.key: v for v in values} if values is not None else {} + for key in keys: + if key in values_dict: + value = values_dict[key] res = {} for feature_name, value_bin in value["values"].items(): val = ValueProto() @@ -249,18 +263,159 @@ def online_read( result.append((value["event_ts"], res)) else: result.append((None, None)) + return result -def _delete_all_values(client, key) -> None: +def _delete_all_values(client, key): """ Delete all data under the key path in datastore. + + Creates and uses a queue of lists of entity keys, which are batch deleted + by multiple threads. """ + + class AtomicCounter(object): + # for tracking how many deletions have already occurred; not used outside this method + def __init__(self): + self.value = 0 + self.lock = Lock() + + def increment(self): + with self.lock: + self.value += 1 + + BATCH_SIZE = 500 # Dec 2021: delete_multi has a max size of 500: https://cloud.google.com/datastore/docs/concepts/limits + NUM_THREADS = 3 + deletion_queue = Queue() + status_info_counter = AtomicCounter() + + def worker(shared_counter): + while True: + client.delete_multi(deletion_queue.get()) + shared_counter.increment() + LOGGER.debug( + f"batch deletions completed: {shared_counter.value} ({shared_counter.value * BATCH_SIZE} total entries) & outstanding queue size: {deletion_queue.qsize()}" + ) + deletion_queue.task_done() + + for _ in range(NUM_THREADS): + Thread(target=worker, args=(status_info_counter,), daemon=True).start() + + query = client.query(kind="Row", ancestor=key) while True: - query = client.query(kind="Row", ancestor=key) - entities = list(query.fetch(limit=1000)) + entities = list(query.fetch(limit=BATCH_SIZE)) if not entities: - return + break + deletion_queue.put([entity.key for entity in entities]) + + deletion_queue.join() + + +def _initialize_client( + project_id: Optional[str], namespace: Optional[str] +) -> datastore.Client: + try: + client = datastore.Client(project=project_id, namespace=namespace,) + return client + except DefaultCredentialsError as e: + raise FeastProviderLoginError( + str(e) + + '\nIt may be necessary to run "gcloud auth application-default login" if you would like to use your ' + "local Google Cloud account " + ) + + +class DatastoreTable(InfraObject): + """ + A Datastore table managed by Feast. + + Attributes: + project: The Feast project of the table. + name: The name of the table. + project_id (optional): The GCP project id. + namespace (optional): Datastore namespace. 
+ """ + + project: str + project_id: Optional[str] + namespace: Optional[str] + + def __init__( + self, + project: str, + name: str, + project_id: Optional[str] = None, + namespace: Optional[str] = None, + ): + super().__init__(name) + self.project = project + self.project_id = project_id + self.namespace = namespace + + def to_infra_object_proto(self) -> InfraObjectProto: + datastore_table_proto = self.to_proto() + return InfraObjectProto( + infra_object_class_type=DATASTORE_INFRA_OBJECT_CLASS_TYPE, + datastore_table=datastore_table_proto, + ) + + def to_proto(self) -> Any: + datastore_table_proto = DatastoreTableProto() + datastore_table_proto.project = self.project + datastore_table_proto.name = self.name + if self.project_id: + datastore_table_proto.project_id.value = self.project_id + if self.namespace: + datastore_table_proto.namespace.value = self.namespace + return datastore_table_proto + + @staticmethod + def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: + datastore_table = DatastoreTable( + project=infra_object_proto.datastore_table.project, + name=infra_object_proto.datastore_table.name, + ) + + # Distinguish between null and empty string, since project_id and namespace are StringValues. + if infra_object_proto.datastore_table.HasField("project_id"): + datastore_table.project_id = ( + infra_object_proto.datastore_table.project_id.value + ) + if infra_object_proto.datastore_table.HasField("namespace"): + datastore_table.namespace = ( + infra_object_proto.datastore_table.namespace.value + ) + + return datastore_table + + @staticmethod + def from_proto(datastore_table_proto: DatastoreTableProto) -> Any: + datastore_table = DatastoreTable( + project=datastore_table_proto.project, name=datastore_table_proto.name, + ) + + # Distinguish between null and empty string, since project_id and namespace are StringValues. + if datastore_table_proto.HasField("project_id"): + datastore_table.project_id = datastore_table_proto.project_id.value + if datastore_table_proto.HasField("namespace"): + datastore_table.namespace = datastore_table_proto.namespace.value + + return datastore_table + + def update(self): + client = _initialize_client(self.project_id, self.namespace) + key = client.key("Project", self.project, "Table", self.name) + entity = datastore.Entity( + key=key, exclude_from_indexes=("created_ts", "event_ts", "values") + ) + entity.update({"created_ts": datetime.utcnow()}) + client.put(entity) + + def teardown(self): + client = _initialize_client(self.project_id, self.namespace) + key = client.key("Project", self.project, "Table", self.name) + _delete_all_values(client, key) - for entity in entities: - client.delete(entity.key) + # Delete the table metadata datastore entity + client.delete(key) diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 722a081f2e..406bee525f 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -11,18 +11,26 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
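# A minimal illustration (not part of this diff) of how the new
# `table_name_template` option introduced below is applied by _get_table_name:
# the template is formatted with the Feast project and the feature view name.
# The project and table names here are hypothetical.
template = "{project}.{table_name}"  # the default template
assert (
    template.format(project="driver_ranking", table_name="driver_stats")
    == "driver_ranking.driver_stats"
)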
+import itertools
+import logging
 from datetime import datetime
 from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
 
 from pydantic import StrictStr
 from pydantic.typing import Literal
 
-from feast import Entity, FeatureTable, FeatureView, utils
+from feast import Entity, FeatureView, utils
+from feast.infra.infra_object import DYNAMODB_INFRA_OBJECT_CLASS_TYPE, InfraObject
 from feast.infra.online_stores.helpers import compute_entity_id
 from feast.infra.online_stores.online_store import OnlineStore
+from feast.protos.feast.core.DynamoDBTable_pb2 import (
+    DynamoDBTable as DynamoDBTableProto,
+)
+from feast.protos.feast.core.InfraObject_pb2 import InfraObject as InfraObjectProto
 from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
 from feast.protos.feast.types.Value_pb2 import Value as ValueProto
 from feast.repo_config import FeastConfigBaseModel, RepoConfig
+from feast.usage import log_exceptions_and_usage, tracing_span
 
 try:
     import boto3
@@ -33,38 +41,71 @@
     raise FeastExtrasDependencyImportError("aws", str(e))
 
 
+logger = logging.getLogger(__name__)
+
+
 class DynamoDBOnlineStoreConfig(FeastConfigBaseModel):
     """Online store config for DynamoDB store"""
 
     type: Literal["dynamodb"] = "dynamodb"
     """Online store type selector"""
 
+    batch_size: int = 40
+    """Number of items to retrieve in a DynamoDB BatchGetItem call."""
+
+    endpoint_url: Union[str, None] = None
+    """DynamoDB local development endpoint URL, e.g. http://localhost:8000"""
+
     region: StrictStr
-    """ AWS Region Name """
+    """AWS Region Name"""
+
+    table_name_template: StrictStr = "{project}.{table_name}"
+    """DynamoDB table name template"""
 
 
 class DynamoDBOnlineStore(OnlineStore):
     """
     Online feature store for AWS DynamoDB.
+
+    Attributes:
+        _dynamodb_client: Boto3 DynamoDB client.
+        _dynamodb_resource: Boto3 DynamoDB resource.
     """
 
+    _dynamodb_client = None
+    _dynamodb_resource = None
+
+    @log_exceptions_and_usage(online_store="dynamodb")
     def update(
         self,
         config: RepoConfig,
-        tables_to_delete: Sequence[Union[FeatureTable, FeatureView]],
-        tables_to_keep: Sequence[Union[FeatureTable, FeatureView]],
+        tables_to_delete: Sequence[FeatureView],
+        tables_to_keep: Sequence[FeatureView],
        entities_to_delete: Sequence[Entity],
        entities_to_keep: Sequence[Entity],
        partial: bool,
    ):
+        """
+        Update tables in the DynamoDB Online Store.
+
+        Args:
+            config: The RepoConfig for the current FeatureStore.
+            tables_to_delete: Tables to delete from the DynamoDB Online Store.
+            tables_to_keep: Tables to keep in the DynamoDB Online Store.
+ """ online_config = config.online_store assert isinstance(online_config, DynamoDBOnlineStoreConfig) - dynamodb_client, dynamodb_resource = self._initialize_dynamodb(online_config) + dynamodb_client = self._get_dynamodb_client( + online_config.region, online_config.endpoint_url + ) + dynamodb_resource = self._get_dynamodb_resource( + online_config.region, online_config.endpoint_url + ) for table_instance in tables_to_keep: try: dynamodb_resource.create_table( - TableName=f"{config.project}.{table_instance.name}", + TableName=_get_table_name(online_config, config, table_instance), KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}], AttributeDefinitions=[ {"AttributeName": "entity_id", "AttributeType": "S"} @@ -80,38 +121,181 @@ def update( for table_instance in tables_to_keep: dynamodb_client.get_waiter("table_exists").wait( - TableName=f"{config.project}.{table_instance.name}" + TableName=_get_table_name(online_config, config, table_instance) ) - self._delete_tables_idempotent(dynamodb_resource, config, tables_to_delete) + for table_to_delete in tables_to_delete: + _delete_table_idempotent( + dynamodb_resource, + _get_table_name(online_config, config, table_to_delete), + ) def teardown( self, config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], + tables: Sequence[FeatureView], entities: Sequence[Entity], ): + """ + Delete tables from the DynamoDB Online Store. + + Args: + config: The RepoConfig for the current FeatureStore. + tables: Tables to delete from the feature repo. + """ online_config = config.online_store assert isinstance(online_config, DynamoDBOnlineStoreConfig) - _, dynamodb_resource = self._initialize_dynamodb(online_config) + dynamodb_resource = self._get_dynamodb_resource( + online_config.region, online_config.endpoint_url + ) - self._delete_tables_idempotent(dynamodb_resource, config, tables) + for table in tables: + _delete_table_idempotent( + dynamodb_resource, _get_table_name(online_config, config, table) + ) + @log_exceptions_and_usage(online_store="dynamodb") def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], progress: Optional[Callable[[int], Any]], ) -> None: + """ + Write a batch of feature rows to online DynamoDB store. + + Note: This method applies a ``batch_writer`` to automatically handle any unprocessed items + and resend them as needed, this is useful if you're loading a lot of data at a time. + + Args: + config: The RepoConfig for the current FeatureStore. + table: Feast FeatureView. + data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, + a dict containing feature values, an event timestamp for the row, and + the created timestamp for the row if it exists. + progress: Optional function to be called once every mini-batch of rows is written to + the online store. Can be used to display progress. 
+        """
+        online_config = config.online_store
+        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
+        dynamodb_resource = self._get_dynamodb_resource(
+            online_config.region, online_config.endpoint_url
+        )
+
+        table_instance = dynamodb_resource.Table(
+            _get_table_name(online_config, config, table)
+        )
+        self._write_batch_non_duplicates(table_instance, data, progress)
+
+    @log_exceptions_and_usage(online_store="dynamodb")
+    def online_read(
+        self,
+        config: RepoConfig,
+        table: FeatureView,
+        entity_keys: List[EntityKeyProto],
+        requested_features: Optional[List[str]] = None,
+    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
+        """
+        Retrieve feature values from the online DynamoDB store.
+
+        Args:
+            config: The RepoConfig for the current FeatureStore.
+            table: Feast FeatureView.
+            entity_keys: a list of entity keys that should be read from the FeatureStore.
+        """
         online_config = config.online_store
         assert isinstance(online_config, DynamoDBOnlineStoreConfig)
-        _, dynamodb_resource = self._initialize_dynamodb(online_config)
+        dynamodb_resource = self._get_dynamodb_resource(
+            online_config.region, online_config.endpoint_url
+        )
+        table_instance = dynamodb_resource.Table(
+            _get_table_name(online_config, config, table)
+        )
+
+        result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []
+        entity_ids = [compute_entity_id(entity_key) for entity_key in entity_keys]
+        batch_size = online_config.batch_size
+        entity_ids_iter = iter(entity_ids)
+        while True:
+            batch = list(itertools.islice(entity_ids_iter, batch_size))
+            # No more items to retrieve
+            if len(batch) == 0:
+                break
+            # Track how many results this batch appends, so that the padding
+            # below is computed per batch rather than against the full list.
+            batch_result_start = len(result)
+            batch_entity_ids = {
+                table_instance.name: {
+                    "Keys": [{"entity_id": entity_id} for entity_id in batch]
+                }
+            }
+            with tracing_span(name="remote_call"):
+                response = dynamodb_resource.batch_get_item(
+                    RequestItems=batch_entity_ids
+                )
+            response = response.get("Responses")
+            table_responses = response.get(table_instance.name)
+            if table_responses:
+                table_responses = self._sort_dynamodb_response(
+                    table_responses, entity_ids
+                )
+                entity_idx = 0
+                for tbl_res in table_responses:
+                    entity_id = tbl_res["entity_id"]
+                    while entity_id != batch[entity_idx]:
+                        result.append((None, None))
+                        entity_idx += 1
+                    res = {}
+                    for feature_name, value_bin in tbl_res["values"].items():
+                        val = ValueProto()
+                        val.ParseFromString(value_bin.value)
+                        res[feature_name] = val
+                    result.append((datetime.fromisoformat(tbl_res["event_ts"]), res))
+                    entity_idx += 1
 
-        table_instance = dynamodb_resource.Table(f"{config.project}.{table.name}")
-        with table_instance.batch_writer() as batch:
+            # Not all entities in a batch may have responses.
+            # Pad with (None, None) for entities in this batch that were not found.
+            batch_size_nones = ((None, None),) * (
+                len(batch) - (len(result) - batch_result_start)
+            )
+            result.extend(batch_size_nones)
+        return result
+
+    def _get_dynamodb_client(self, region: str, endpoint_url: Optional[str] = None):
+        if self._dynamodb_client is None:
+            self._dynamodb_client = _initialize_dynamodb_client(region, endpoint_url)
+        return self._dynamodb_client
+
+    def _get_dynamodb_resource(self, region: str, endpoint_url: Optional[str] = None):
+        if self._dynamodb_resource is None:
+            self._dynamodb_resource = _initialize_dynamodb_resource(
+                region, endpoint_url
+            )
+        return self._dynamodb_resource
+
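# A minimal sketch (not part of this diff) of the fixed-size batching that
# online_read above performs: itertools.islice drains the id iterator in
# chunks of `batch_size`, since DynamoDB's BatchGetItem accepts at most 100
# keys per request. `dynamodb_resource` is assumed to be a boto3 DynamoDB
# ServiceResource; `table_name` and `entity_ids` are hypothetical inputs.
import itertools

def iter_batch_get(dynamodb_resource, table_name, entity_ids, batch_size=40):
    ids = iter(entity_ids)
    while True:
        batch = list(itertools.islice(ids, batch_size))
        if not batch:
            break
        request = {table_name: {"Keys": [{"entity_id": i} for i in batch]}}
        response = dynamodb_resource.batch_get_item(RequestItems=request)
        # Items may come back in any order, and missing keys are simply
        # absent, which is why the caller re-sorts and pads the responses.
        yield batch, response["Responses"].get(table_name, [])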
+    def _sort_dynamodb_response(self, responses: list, order: list):
+        """DynamoDB Batch Get Item doesn't return items in a particular order."""
+        # Assign an index to each entity_id, in request order
+        order_with_index = {value: idx for idx, value in enumerate(order)}
+        # Sort table responses by index
+        table_responses_ordered = [
+            (order_with_index[tbl_res["entity_id"]], tbl_res) for tbl_res in responses
+        ]
+        table_responses_ordered = sorted(
+            table_responses_ordered, key=lambda tup: tup[0]
+        )
+        _, table_responses_ordered = zip(*table_responses_ordered)
+        return table_responses_ordered
+
+    @log_exceptions_and_usage(online_store="dynamodb")
+    def _write_batch_non_duplicates(
+        self,
+        table_instance,
+        data: List[
+            Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
+        ],
+        progress: Optional[Callable[[int], Any]],
+    ):
+        """Deduplicate write batch request items on ``entity_id`` primary key."""
+        with table_instance.batch_writer(overwrite_by_pkeys=["entity_id"]) as batch:
             for entity_key, features, timestamp, created_ts in data:
                 entity_id = compute_entity_id(entity_key)
                 batch.put_item(
@@ -127,56 +311,124 @@ def online_write_batch(
                 if progress:
                     progress(1)
 
-    def online_read(
-        self,
-        config: RepoConfig,
-        table: Union[FeatureTable, FeatureView],
-        entity_keys: List[EntityKeyProto],
-        requested_features: Optional[List[str]] = None,
-    ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
-        online_config = config.online_store
-        assert isinstance(online_config, DynamoDBOnlineStoreConfig)
-        _, dynamodb_resource = self._initialize_dynamodb(online_config)
-
-        result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []
-        for entity_key in entity_keys:
-            table_instance = dynamodb_resource.Table(f"{config.project}.{table.name}")
-            entity_id = compute_entity_id(entity_key)
-            response = table_instance.get_item(Key={"entity_id": entity_id})
-            value = response.get("Item")
-
-            if value is not None:
-                res = {}
-                for feature_name, value_bin in value["values"].items():
-                    val = ValueProto()
-                    val.ParseFromString(value_bin.value)
-                    res[feature_name] = val
-                result.append((value["event_ts"], res))
-            else:
-                result.append((None, None))
-        return result
 
+def _initialize_dynamodb_client(region: str, endpoint_url: Optional[str] = None):
+    return boto3.client("dynamodb", region_name=region, endpoint_url=endpoint_url)
+
+
+def _initialize_dynamodb_resource(region: str, endpoint_url: Optional[str] = None):
+    return boto3.resource("dynamodb", region_name=region, endpoint_url=endpoint_url)
+
+
+# TODO(achals): This form of user-facing templating is experimental.
+# Please refer to https://github.com/feast-dev/feast/issues/2438 before building on top of it.
+def _get_table_name(
+    online_config: DynamoDBOnlineStoreConfig, config: RepoConfig, table: FeatureView
+) -> str:
+    return online_config.table_name_template.format(
+        project=config.project, table_name=table.name
+    )
+
+
+def _delete_table_idempotent(
+    dynamodb_resource, table_name: str,
+):
+    try:
+        table = dynamodb_resource.Table(table_name)
+        table.delete()
+        logger.info(f"DynamoDB table {table_name} was deleted")
+    except ClientError as ce:
+        # If the table deletion fails with ResourceNotFoundException,
+        # it means the table has already been deleted.
+        # Otherwise, re-raise the exception
+        if ce.response["Error"]["Code"] != "ResourceNotFoundException":
+            raise
+        else:
+            logger.warning(f"Trying to delete table that doesn't exist: {table_name}")
+
+
+class DynamoDBTable(InfraObject):
+    """
+    A DynamoDB table managed by Feast.
+
+    Attributes:
+        name: The name of the table.
+        region: The region of the table.
+        endpoint_url: Local DynamoDB endpoint URL.
+        _dynamodb_client: Boto3 DynamoDB client.
+        _dynamodb_resource: Boto3 DynamoDB resource.
+ """ - def _initialize_dynamodb(self, online_config: DynamoDBOnlineStoreConfig): - return ( - boto3.client("dynamodb", region_name=online_config.region), - boto3.resource("dynamodb", region_name=online_config.region), + region: str + endpoint_url = None + _dynamodb_client = None + _dynamodb_resource = None + + def __init__(self, name: str, region: str, endpoint_url: Optional[str] = None): + super().__init__(name) + self.region = region + self.endpoint_url = endpoint_url + + def to_infra_object_proto(self) -> InfraObjectProto: + dynamodb_table_proto = self.to_proto() + return InfraObjectProto( + infra_object_class_type=DYNAMODB_INFRA_OBJECT_CLASS_TYPE, + dynamodb_table=dynamodb_table_proto, ) - def _delete_tables_idempotent( - self, - dynamodb_resource, - config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], - ): - for table_instance in tables: - try: - table = dynamodb_resource.Table( - f"{config.project}.{table_instance.name}" - ) - table.delete() - except ClientError as ce: - # If the table deletion fails with ResourceNotFoundException, - # it means the table has already been deleted. - # Otherwise, re-raise the exception - if ce.response["Error"]["Code"] != "ResourceNotFoundException": - raise + def to_proto(self) -> Any: + dynamodb_table_proto = DynamoDBTableProto() + dynamodb_table_proto.name = self.name + dynamodb_table_proto.region = self.region + return dynamodb_table_proto + + @staticmethod + def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: + return DynamoDBTable( + name=infra_object_proto.dynamodb_table.name, + region=infra_object_proto.dynamodb_table.region, + ) + + @staticmethod + def from_proto(dynamodb_table_proto: DynamoDBTableProto) -> Any: + return DynamoDBTable( + name=dynamodb_table_proto.name, region=dynamodb_table_proto.region, + ) + + def update(self): + dynamodb_client = self._get_dynamodb_client(self.region, self.endpoint_url) + dynamodb_resource = self._get_dynamodb_resource(self.region, self.endpoint_url) + + try: + dynamodb_resource.create_table( + TableName=f"{self.name}", + KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}], + AttributeDefinitions=[ + {"AttributeName": "entity_id", "AttributeType": "S"} + ], + BillingMode="PAY_PER_REQUEST", + ) + except ClientError as ce: + # If the table creation fails with ResourceInUseException, + # it means the table already exists or is being created. 
+ # Otherwise, re-raise the exception + if ce.response["Error"]["Code"] != "ResourceInUseException": + raise + + dynamodb_client.get_waiter("table_exists").wait(TableName=f"{self.name}") + + def teardown(self): + dynamodb_resource = self._get_dynamodb_resource(self.region, self.endpoint_url) + _delete_table_idempotent(dynamodb_resource, self.name) + + def _get_dynamodb_client(self, region: str, endpoint_url: Optional[str] = None): + if self._dynamodb_client is None: + self._dynamodb_client = _initialize_dynamodb_client(region, endpoint_url) + return self._dynamodb_client + + def _get_dynamodb_resource(self, region: str, endpoint_url: Optional[str] = None): + if self._dynamodb_resource is None: + self._dynamodb_resource = _initialize_dynamodb_resource( + region, endpoint_url + ) + return self._dynamodb_resource diff --git a/sdk/python/feast/infra/online_stores/helpers.py b/sdk/python/feast/infra/online_stores/helpers.py index 788be68b8d..b206c08b7c 100644 --- a/sdk/python/feast/infra/online_stores/helpers.py +++ b/sdk/python/feast/infra/online_stores/helpers.py @@ -1,49 +1,33 @@ -import importlib import struct -from typing import Any +from typing import Any, List import mmh3 -from feast import errors -from feast.infra.key_encoding_utils import serialize_entity_key +from feast.importer import import_class +from feast.infra.key_encoding_utils import ( + serialize_entity_key, + serialize_entity_key_prefix, +) from feast.infra.online_stores.online_store import OnlineStore -from feast.protos.feast.storage.Redis_pb2 import RedisKeyV2 as RedisKeyProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -def get_online_store_from_config(online_store_config: Any,) -> OnlineStore: - """Get the offline store from offline store config""" - +def get_online_store_from_config(online_store_config: Any) -> OnlineStore: + """Creates an online store corresponding to the given online store config.""" module_name = online_store_config.__module__ qualified_name = type(online_store_config).__name__ - store_class_name = qualified_name.replace("Config", "") - try: - module = importlib.import_module(module_name) - except Exception as e: - # The original exception can be anything - either module not found, - # or any other kind of error happening during the module import time. - # So we should include the original error as well in the stack trace. 
- raise errors.FeastModuleImportError(module_name, "OnlineStore") from e - - # Try getting the provider class definition - try: - online_store_class = getattr(module, store_class_name) - except AttributeError: - # This can only be one type of error, when class_name attribute does not exist in the module - # So we don't have to include the original exception here - raise errors.FeastClassImportError( - module_name, store_class_name, class_type="OnlineStore" - ) from None + class_name = qualified_name.replace("Config", "") + online_store_class = import_class(module_name, class_name, "OnlineStore") return online_store_class() -def _redis_key(project: str, entity_key: EntityKeyProto): - redis_key = RedisKeyProto( - project=project, - entity_names=entity_key.join_keys, - entity_values=entity_key.entity_values, - ) - return redis_key.SerializeToString() +def _redis_key(project: str, entity_key: EntityKeyProto) -> bytes: + key: List[bytes] = [serialize_entity_key(entity_key), project.encode("utf-8")] + return b"".join(key) + + +def _redis_key_prefix(entity_keys: List[str]) -> bytes: + return serialize_entity_key_prefix(entity_keys) def _mmh3(key: str): diff --git a/sdk/python/feast/infra/online_stores/online_store.py b/sdk/python/feast/infra/online_stores/online_store.py index 8050d07f00..04c6a065fb 100644 --- a/sdk/python/feast/infra/online_stores/online_store.py +++ b/sdk/python/feast/infra/online_stores/online_store.py @@ -14,10 +14,12 @@ from abc import ABC, abstractmethod from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple -from feast import Entity, FeatureTable +from feast import Entity from feast.feature_view import FeatureView +from feast.infra.infra_object import InfraObject +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig @@ -33,7 +35,7 @@ class OnlineStore(ABC): def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -47,7 +49,7 @@ def online_write_batch( Args: config: The RepoConfig for the current FeatureStore. - table: Feast FeatureTable or FeatureView + table: Feast FeatureView data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, a dict containing feature values, an event timestamp for the row, and the created timestamp for the row if it exists. @@ -60,7 +62,7 @@ def online_write_batch( def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: @@ -70,13 +72,13 @@ def online_read( Args: config: The RepoConfig for the current FeatureStore. - table: Feast FeatureTable or FeatureView + table: Feast FeatureView entity_keys: a list of entity keys that should be read from the FeatureStore. requested_features: (Optional) A subset of the features that should be read from the FeatureStore. Returns: - Data is returned as a list, one item per entity key. 
Each item in the list is a tuple - of event_ts for the row, and the feature data as a dict from feature names to values. - Values are returned as Value proto message. + Data is returned as a list, one item per entity key in the original order as the entity_keys argument. + Each item in the list is a tuple of event_ts for the row, and the feature data as a dict from feature names + to values. Values are returned as Value proto message. """ ... @@ -84,19 +86,31 @@ def online_read( def update( self, config: RepoConfig, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, ): ... + def plan( + self, config: RepoConfig, desired_registry_proto: RegistryProto + ) -> List[InfraObject]: + """ + Returns the set of InfraObjects required to support the desired registry. + + Args: + config: The RepoConfig for the current FeatureStore. + desired_registry_proto: The desired registry, in proto form. + """ + return [] + @abstractmethod def teardown( self, config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], + tables: Sequence[FeatureView], entities: Sequence[Entity], ): ... diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index a226c5cd18..9ceceff0ac 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -12,30 +12,43 @@ # See the License for the specific language governing permissions and # limitations under the License. import json +import logging from datetime import datetime from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - +from typing import ( + Any, + ByteString, + Callable, + Dict, + List, + Optional, + Sequence, + Tuple, + Union, +) + +import pytz from google.protobuf.timestamp_pb2 import Timestamp from pydantic import StrictStr from pydantic.typing import Literal -from feast import Entity, FeatureTable, FeatureView, RepoConfig, utils -from feast.infra.online_stores.helpers import _mmh3, _redis_key +from feast import Entity, FeatureView, RepoConfig, utils +from feast.infra.online_stores.helpers import _mmh3, _redis_key, _redis_key_prefix from feast.infra.online_stores.online_store import OnlineStore from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.usage import log_exceptions_and_usage, tracing_span try: from redis import Redis - from rediscluster import RedisCluster + from redis.cluster import ClusterNode, RedisCluster except ImportError as e: from feast.errors import FeastExtrasDependencyImportError raise FeastExtrasDependencyImportError("redis", str(e)) -EX_SECONDS = 253402300799 +logger = logging.getLogger(__name__) class RedisType(str, Enum): @@ -56,34 +69,63 @@ class RedisOnlineStoreConfig(FeastConfigBaseModel): """Connection string containing the host, port, and configuration parameters for Redis format: host:port,parameter1,parameter2 eg. 
redis:6379,db=0 """
 
+    key_ttl_seconds: Optional[int] = None
+    """(Optional) Redis key bin TTL (in seconds) for expiring entities"""
+
 
 class RedisOnlineStore(OnlineStore):
     _client: Optional[Union[Redis, RedisCluster]] = None
 
+    def delete_entity_values(self, config: RepoConfig, join_keys: List[str]):
+        client = self._get_client(config.online_store)
+        deleted_count = 0
+        pipeline = client.pipeline(transaction=False)
+        prefix = _redis_key_prefix(join_keys)
+
+        for _k in client.scan_iter(
+            b"".join([prefix, b"*", config.project.encode("utf8")])
+        ):
+            pipeline.delete(_k)
+            deleted_count += 1
+        pipeline.execute()
+
+        logger.debug(f"Deleted {deleted_count} rows for entity {', '.join(join_keys)}")
+
+    @log_exceptions_and_usage(online_store="redis")
     def update(
         self,
         config: RepoConfig,
-        tables_to_delete: Sequence[Union[FeatureTable, FeatureView]],
-        tables_to_keep: Sequence[Union[FeatureTable, FeatureView]],
+        tables_to_delete: Sequence[FeatureView],
+        tables_to_keep: Sequence[FeatureView],
         entities_to_delete: Sequence[Entity],
         entities_to_keep: Sequence[Entity],
         partial: bool,
     ):
         """
-        There's currently no setup done for Redis.
+        Look for join_keys (lists of entities) that are no longer in use
+        (typically because the last feature view using a specific compound key was deleted)
+        and remove all features attached to those join_keys.
         """
-        pass
+        join_keys_to_keep = set(tuple(table.entities) for table in tables_to_keep)
+
+        join_keys_to_delete = set(tuple(table.entities) for table in tables_to_delete)
+
+        for join_keys in join_keys_to_delete - join_keys_to_keep:
+            self.delete_entity_values(config, list(join_keys))
 
     def teardown(
         self,
         config: RepoConfig,
-        tables: Sequence[Union[FeatureTable, FeatureView]],
+        tables: Sequence[FeatureView],
         entities: Sequence[Entity],
     ):
         """
-        There's currently no teardown done for Redis.
+        Delete all feature values in Redis for the tables/views being removed.
         """
-        pass
+        join_keys_to_delete = set(tuple(table.entities) for table in tables)
+
+        for join_keys in join_keys_to_delete:
+            self.delete_entity_values(config, list(join_keys))
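# A minimal sketch (not part of this diff) of the key pattern that
# delete_entity_values above scans for: a row key is the serialized entity
# key followed by the project name, so every row for one set of join keys
# matches b"<prefix>*<project>". `serialize_entity_key_prefix` is the helper
# imported from feast.infra.key_encoding_utils elsewhere in this diff; the
# inputs are hypothetical.
from feast.infra.key_encoding_utils import serialize_entity_key_prefix

def entity_scan_pattern(join_keys, project):
    prefix = serialize_entity_key_prefix(join_keys)
    return b"".join([prefix, b"*", project.encode("utf8")])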
""" - pass + join_keys_to_delete = set(tuple(table.entities) for table in tables) + + for join_keys in join_keys_to_delete: + self.delete_entity_values(config, list(join_keys)) @staticmethod def _parse_connection_string(connection_string: str): @@ -121,8 +163,10 @@ def _get_client(self, online_store_config: RedisOnlineStoreConfig): startup_nodes, kwargs = self._parse_connection_string( online_store_config.connection_string ) - if online_store_config.type == RedisType.redis_cluster: - kwargs["startup_nodes"] = startup_nodes + if online_store_config.redis_type == RedisType.redis_cluster: + kwargs["startup_nodes"] = [ + ClusterNode(**node) for node in startup_nodes + ] self._client = RedisCluster(**kwargs) else: kwargs["host"] = startup_nodes[0]["host"] @@ -130,10 +174,11 @@ def _get_client(self, online_store_config: RedisOnlineStoreConfig): self._client = Redis(**kwargs) return self._client + @log_exceptions_and_usage(online_store="redis") def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -145,32 +190,61 @@ def online_write_batch( client = self._get_client(online_store_config) project = config.project - entity_hset = {} feature_view = table.name - - ex = Timestamp() - ex.seconds = EX_SECONDS - ex_str = ex.SerializeToString() - - for entity_key, values, timestamp, created_ts in data: - redis_key_bin = _redis_key(project, entity_key) - ts = Timestamp() - ts.seconds = int(utils.make_tzaware(timestamp).timestamp()) - entity_hset[f"_ts:{feature_view}"] = ts.SerializeToString() - entity_hset[f"_ex:{feature_view}"] = ex_str - - for feature_name, val in values.items(): - f_key = _mmh3(f"{feature_view}:{feature_name}") - entity_hset[f_key] = val.SerializeToString() - - client.hset(redis_key_bin, mapping=entity_hset) + ts_key = f"_ts:{feature_view}" + keys = [] + # redis pipelining optimization: send multiple commands to redis server without waiting for every reply + with client.pipeline(transaction=False) as pipe: + # check if a previous record under the key bin exists + # TODO: investigate if check and set is a better approach rather than pulling all entity ts and then setting + # it may be significantly slower but avoids potential (rare) race conditions + for entity_key, _, _, _ in data: + redis_key_bin = _redis_key(project, entity_key) + keys.append(redis_key_bin) + pipe.hmget(redis_key_bin, ts_key) + prev_event_timestamps = pipe.execute() + # flattening the list of lists. `hmget` does the lookup assuming a list of keys in the key bin + prev_event_timestamps = [i[0] for i in prev_event_timestamps] + + for redis_key_bin, prev_event_time, (_, values, timestamp, _) in zip( + keys, prev_event_timestamps, data + ): + event_time_seconds = int(utils.make_tzaware(timestamp).timestamp()) + + # ignore if event_timestamp is before the event features that are currently in the feature store + if prev_event_time: + prev_ts = Timestamp() + prev_ts.ParseFromString(prev_event_time) + if prev_ts.seconds and event_time_seconds <= prev_ts.seconds: + # TODO: somehow signal that it's not overwriting the current record? 
+ if progress: + progress(1) + continue + + ts = Timestamp() + ts.seconds = event_time_seconds + entity_hset = dict() + entity_hset[ts_key] = ts.SerializeToString() + + for feature_name, val in values.items(): + f_key = _mmh3(f"{feature_view}:{feature_name}") + entity_hset[f_key] = val.SerializeToString() + + pipe.hset(redis_key_bin, mapping=entity_hset) + + if online_store_config.key_ttl_seconds: + pipe.expire( + name=redis_key_bin, time=online_store_config.key_ttl_seconds + ) + results = pipe.execute() if progress: - progress(1) + progress(len(results)) + @log_exceptions_and_usage(online_store="redis") def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: @@ -186,30 +260,50 @@ def online_read( if not requested_features: requested_features = [f.name for f in table.features] + hset_keys = [_mmh3(f"{feature_view}:{k}") for k in requested_features] + + ts_key = f"_ts:{feature_view}" + hset_keys.append(ts_key) + requested_features.append(ts_key) + + keys = [] for entity_key in entity_keys: redis_key_bin = _redis_key(project, entity_key) - hset_keys = [_mmh3(f"{feature_view}:{k}") for k in requested_features] - ts_key = f"_ts:{feature_view}" - hset_keys.append(ts_key) - values = client.hmget(redis_key_bin, hset_keys) - requested_features.append(ts_key) - res_val = dict(zip(requested_features, values)) - - res_ts = Timestamp() - ts_val = res_val.pop(ts_key) - if ts_val: - res_ts.ParseFromString(ts_val) - - res = {} - for feature_name, val_bin in res_val.items(): - val = ValueProto() - if val_bin: - val.ParseFromString(val_bin) - res[feature_name] = val - - if not res: - result.append((None, None)) - else: - timestamp = datetime.fromtimestamp(res_ts.seconds) - result.append((timestamp, res)) + keys.append(redis_key_bin) + with client.pipeline(transaction=False) as pipe: + for redis_key_bin in keys: + pipe.hmget(redis_key_bin, hset_keys) + with tracing_span(name="remote_call"): + redis_values = pipe.execute() + for values in redis_values: + features = self._get_features_for_entity( + values, feature_view, requested_features + ) + result.append(features) return result + + def _get_features_for_entity( + self, + values: List[ByteString], + feature_view: str, + requested_features: List[str], + ) -> Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]: + res_val = dict(zip(requested_features, values)) + + res_ts = Timestamp() + ts_val = res_val.pop(f"_ts:{feature_view}") + if ts_val: + res_ts.ParseFromString(bytes(ts_val)) + + res = {} + for feature_name, val_bin in res_val.items(): + val = ValueProto() + if val_bin: + val.ParseFromString(bytes(val_bin)) + res[feature_name] = val + + if not res: + return None, None + else: + timestamp = datetime.fromtimestamp(res_ts.seconds, tz=pytz.utc) + return timestamp, res diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index dbd837c5df..5657fbe372 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -11,28 +11,33 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
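# A minimal sketch (not part of this diff) of the staleness guard used in
# RedisOnlineStore.online_write_batch above: stored event timestamps are
# first fetched in one pipeline, then a write is skipped whenever its event
# time is not newer than what is already stored. `client` is assumed to be a
# redis.Redis instance; `rows` is a hypothetical list of
# (redis_key, event_time_seconds, hash_mapping) tuples.
from google.protobuf.timestamp_pb2 import Timestamp

def write_if_newer(client, feature_view, rows):
    ts_key = f"_ts:{feature_view}"
    with client.pipeline(transaction=False) as pipe:
        for key, _, _ in rows:
            pipe.hmget(key, ts_key)
        prev_raw = [r[0] for r in pipe.execute()]  # one single-element list per key

        for (key, event_seconds, mapping), raw in zip(rows, prev_raw):
            if raw:
                prev_ts = Timestamp()
                prev_ts.ParseFromString(raw)
                if prev_ts.seconds and event_seconds <= prev_ts.seconds:
                    continue  # an equal-or-newer record is already stored
            ts = Timestamp()
            ts.seconds = event_seconds
            mapping[ts_key] = ts.SerializeToString()
            pipe.hset(key, mapping=mapping)
        pipe.execute()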
- +import itertools import os import sqlite3 from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple -import pytz from pydantic import StrictStr from pydantic.schema import Literal -from feast import Entity, FeatureTable +from feast import Entity from feast.feature_view import FeatureView +from feast.infra.infra_object import SQLITE_INFRA_OBJECT_CLASS_TYPE, InfraObject from feast.infra.key_encoding_utils import serialize_entity_key from feast.infra.online_stores.online_store import OnlineStore +from feast.protos.feast.core.InfraObject_pb2 import InfraObject as InfraObjectProto +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.protos.feast.core.SqliteTable_pb2 import SqliteTable as SqliteTableProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.usage import log_exceptions_and_usage, tracing_span +from feast.utils import to_naive_utc class SqliteOnlineStoreConfig(FeastConfigBaseModel): - """ Online store config for local (SQLite-based) store """ + """Online store config for local (SQLite-based) store""" type: Literal[ "sqlite", "feast.infra.online_stores.sqlite.SqliteOnlineStore" @@ -47,6 +52,9 @@ class SqliteOnlineStore(OnlineStore): """ OnlineStore is an object used for all interaction between Feast and the service used for offline storage of features. + + Attributes: + _conn: SQLite connection. """ _conn: Optional[sqlite3.Connection] = None @@ -67,16 +75,14 @@ def _get_db_path(config: RepoConfig) -> str: def _get_conn(self, config: RepoConfig): if not self._conn: db_path = self._get_db_path(config) - Path(db_path).parent.mkdir(exist_ok=True) - self._conn = sqlite3.connect( - db_path, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES, - ) + self._conn = _initialize_conn(db_path) return self._conn + @log_exceptions_and_usage(online_store="sqlite") def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -90,9 +96,9 @@ def online_write_batch( with conn: for entity_key, values, timestamp, created_ts in data: entity_key_bin = serialize_entity_key(entity_key) - timestamp = _to_naive_utc(timestamp) + timestamp = to_naive_utc(timestamp) if created_ts is not None: - created_ts = _to_naive_utc(created_ts) + created_ts = to_naive_utc(created_ts) for feature_name, val in values.items(): conn.execute( @@ -127,31 +133,38 @@ def online_write_batch( if progress: progress(1) + @log_exceptions_and_usage(online_store="sqlite") def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: Optional[List[str]] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - pass conn = self._get_conn(config) cur = conn.cursor() result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] - project = config.project - for entity_key in entity_keys: - entity_key_bin = serialize_entity_key(entity_key) - + with tracing_span(name="remote_call"): + # Fetch all entities in one go cur.execute( - f"SELECT feature_name, value, event_ts FROM {_table_id(project, table)} 
WHERE entity_key = ?", - (entity_key_bin,), + f"SELECT entity_key, feature_name, value, event_ts " + f"FROM {_table_id(config.project, table)} " + f"WHERE entity_key IN ({','.join('?' * len(entity_keys))}) " + f"ORDER BY entity_key", + [serialize_entity_key(entity_key) for entity_key in entity_keys], ) + rows = cur.fetchall() + rows = { + k: list(group) for k, group in itertools.groupby(rows, key=lambda r: r[0]) + } + for entity_key in entity_keys: + entity_key_bin = serialize_entity_key(entity_key) res = {} res_ts = None - for feature_name, val_bin, ts in cur.fetchall(): + for _, feature_name, val_bin, ts in rows.get(entity_key_bin, []): val = ValueProto() val.ParseFromString(val_bin) res[feature_name] = val @@ -163,11 +176,12 @@ def online_read( result.append((res_ts, res)) return result + @log_exceptions_and_usage(online_store="sqlite") def update( self, config: RepoConfig, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, @@ -186,21 +200,95 @@ def update( for table in tables_to_delete: conn.execute(f"DROP TABLE IF EXISTS {_table_id(project, table)}") + @log_exceptions_and_usage(online_store="sqlite") + def plan( + self, config: RepoConfig, desired_registry_proto: RegistryProto + ) -> List[InfraObject]: + project = config.project + + infra_objects: List[InfraObject] = [ + SqliteTable( + path=self._get_db_path(config), + name=_table_id(project, FeatureView.from_proto(view)), + ) + for view in desired_registry_proto.feature_views + ] + return infra_objects + def teardown( self, config: RepoConfig, - tables: Sequence[Union[FeatureTable, FeatureView]], + tables: Sequence[FeatureView], entities: Sequence[Entity], ): - os.unlink(self._get_db_path(config)) + try: + os.unlink(self._get_db_path(config)) + except FileNotFoundError: + pass + +def _initialize_conn(db_path: str): + Path(db_path).parent.mkdir(exist_ok=True) + return sqlite3.connect( + db_path, + detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES, + check_same_thread=False, + ) -def _table_id(project: str, table: Union[FeatureTable, FeatureView]) -> str: + +def _table_id(project: str, table: FeatureView) -> str: return f"{project}_{table.name}" -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) +class SqliteTable(InfraObject): + """ + A Sqlite table managed by Feast. + + Attributes: + path: The absolute path of the Sqlite file. + name: The name of the table. + conn: SQLite connection. 
+ """ + + path: str + conn: sqlite3.Connection + + def __init__(self, path: str, name: str): + super().__init__(name) + self.path = path + self.conn = _initialize_conn(path) + + def to_infra_object_proto(self) -> InfraObjectProto: + sqlite_table_proto = self.to_proto() + return InfraObjectProto( + infra_object_class_type=SQLITE_INFRA_OBJECT_CLASS_TYPE, + sqlite_table=sqlite_table_proto, + ) + + def to_proto(self) -> Any: + sqlite_table_proto = SqliteTableProto() + sqlite_table_proto.path = self.path + sqlite_table_proto.name = self.name + return sqlite_table_proto + + @staticmethod + def from_infra_object_proto(infra_object_proto: InfraObjectProto) -> Any: + return SqliteTable( + path=infra_object_proto.sqlite_table.path, + name=infra_object_proto.sqlite_table.name, + ) + + @staticmethod + def from_proto(sqlite_table_proto: SqliteTableProto) -> Any: + return SqliteTable(path=sqlite_table_proto.path, name=sqlite_table_proto.name,) + + def update(self): + self.conn.execute( + f"CREATE TABLE IF NOT EXISTS {self.name} (entity_key BLOB, feature_name TEXT, value BLOB, event_ts timestamp, created_ts timestamp, PRIMARY KEY(entity_key, feature_name))" + ) + self.conn.execute( + f"CREATE INDEX IF NOT EXISTS {self.name}_ek ON {self.name} (entity_key);" + ) + + def teardown(self): + self.conn.execute(f"DROP TABLE IF EXISTS {self.name}") diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py new file mode 100644 index 0000000000..09ca98d86d --- /dev/null +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -0,0 +1,216 @@ +from datetime import datetime, timedelta +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import pandas +import pyarrow as pa +from tqdm import tqdm + +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.offline_stores.offline_store import RetrievalJob +from feast.infra.offline_stores.offline_utils import get_offline_store_from_config +from feast.infra.online_stores.helpers import get_online_store_from_config +from feast.infra.provider import ( + Provider, + _convert_arrow_to_proto, + _get_column_names, + _run_field_mapping, +) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.registry import Registry +from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDataset +from feast.usage import RatioSampler, log_exceptions_and_usage, set_usage_attribute +from feast.utils import make_tzaware + +DEFAULT_BATCH_SIZE = 10_000 + + +class PassthroughProvider(Provider): + """ + The Passthrough provider delegates all operations to the underlying online and offline stores. 
+ """ + + def __init__(self, config: RepoConfig): + super().__init__(config) + + self.repo_config = config + self.offline_store = get_offline_store_from_config(config.offline_store) + self.online_store = ( + get_online_store_from_config(config.online_store) + if config.online_store + else None + ) + + def update_infra( + self, + project: str, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + set_usage_attribute("provider", self.__class__.__name__) + + # Call update only if there is an online store + if self.online_store: + self.online_store.update( + config=self.repo_config, + tables_to_delete=tables_to_delete, + tables_to_keep=tables_to_keep, + entities_to_keep=entities_to_keep, + entities_to_delete=entities_to_delete, + partial=partial, + ) + + def teardown_infra( + self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], + ) -> None: + set_usage_attribute("provider", self.__class__.__name__) + if self.online_store: + self.online_store.teardown(self.repo_config, tables, entities) + + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + set_usage_attribute("provider", self.__class__.__name__) + if self.online_store: + self.online_store.online_write_batch(config, table, data, progress) + + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.001)) + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: List[str] = None, + ) -> List: + set_usage_attribute("provider", self.__class__.__name__) + result = [] + if self.online_store: + result = self.online_store.online_read( + config, table, entity_keys, requested_features + ) + return result + + def ingest_df( + self, feature_view: FeatureView, entities: List[Entity], df: pandas.DataFrame, + ): + set_usage_attribute("provider", self.__class__.__name__) + table = pa.Table.from_pandas(df) + + if feature_view.batch_source.field_mapping is not None: + table = _run_field_mapping(table, feature_view.batch_source.field_mapping) + + join_keys = {entity.join_key: entity.value_type for entity in entities} + rows_to_write = _convert_arrow_to_proto(table, feature_view, join_keys) + + self.online_write_batch( + self.repo_config, feature_view, rows_to_write, progress=None + ) + + def materialize_single_feature_view( + self, + config: RepoConfig, + feature_view: FeatureView, + start_date: datetime, + end_date: datetime, + registry: Registry, + project: str, + tqdm_builder: Callable[[int], tqdm], + ) -> None: + set_usage_attribute("provider", self.__class__.__name__) + + entities = [] + for entity_name in feature_view.entities: + entities.append(registry.get_entity(entity_name, project)) + + ( + join_key_columns, + feature_name_columns, + timestamp_field, + created_timestamp_column, + ) = _get_column_names(feature_view, entities) + + offline_job = self.offline_store.pull_latest_from_table_or_query( + config=config, + data_source=feature_view.batch_source, + join_key_columns=join_key_columns, + feature_name_columns=feature_name_columns, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + start_date=start_date, + end_date=end_date, + ) + + table = offline_job.to_arrow() + + if feature_view.batch_source.field_mapping is 
not None: + table = _run_field_mapping(table, feature_view.batch_source.field_mapping) + + join_keys = {entity.join_key: entity.value_type for entity in entities} + + with tqdm_builder(table.num_rows) as pbar: + for batch in table.to_batches(DEFAULT_BATCH_SIZE): + rows_to_write = _convert_arrow_to_proto(batch, feature_view, join_keys) + self.online_write_batch( + self.repo_config, + feature_view, + rows_to_write, + lambda x: pbar.update(x), + ) + + def get_historical_features( + self, + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pandas.DataFrame, str], + registry: Registry, + project: str, + full_feature_names: bool, + ) -> RetrievalJob: + set_usage_attribute("provider", self.__class__.__name__) + + job = self.offline_store.get_historical_features( + config=config, + feature_views=feature_views, + feature_refs=feature_refs, + entity_df=entity_df, + registry=registry, + project=project, + full_feature_names=full_feature_names, + ) + + return job + + def retrieve_saved_dataset( + self, config: RepoConfig, dataset: SavedDataset + ) -> RetrievalJob: + set_usage_attribute("provider", self.__class__.__name__) + + feature_name_columns = [ + ref.replace(":", "__") if dataset.full_feature_names else ref.split(":")[1] + for ref in dataset.features + ] + + # ToDo: replace hardcoded value + event_ts_column = "event_timestamp" + + return self.offline_store.pull_all_from_table_or_query( + config=config, + data_source=dataset.storage.to_data_source(), + join_key_columns=dataset.join_keys, + feature_name_columns=feature_name_columns, + timestamp_field=event_ts_column, + start_date=make_tzaware(dataset.min_event_timestamp), # type: ignore + end_date=make_tzaware(dataset.max_event_timestamp + timedelta(seconds=1)), # type: ignore + ) diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index f778032c17..a71bd6d2d0 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -1,33 +1,48 @@ import abc +from collections import defaultdict from datetime import datetime from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import dask.dataframe as dd import pandas import pyarrow from tqdm import tqdm -from feast import errors, importer +from feast import errors from feast.entity import Entity -from feast.feature_table import FeatureTable -from feast.feature_view import FeatureView +from feast.feature_view import DUMMY_ENTITY_ID, FeatureView +from feast.importer import import_class +from feast.infra.infra_object import Infra from feast.infra.offline_stores.offline_store import RetrievalJob +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.registry import Registry from feast.repo_config import RepoConfig -from feast.type_map import python_value_to_proto_value +from feast.saved_dataset import SavedDataset +from feast.type_map import python_values_to_proto_values +from feast.value_type import ValueType -DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL = "event_timestamp" +PROVIDERS_CLASS_FOR_TYPE = { + "gcp": "feast.infra.gcp.GcpProvider", + "aws": "feast.infra.aws.AwsProvider", + "local": "feast.infra.local.LocalProvider", +} class Provider(abc.ABC): + @abc.abstractmethod + def __init__(self, config: RepoConfig): + 
... + @abc.abstractmethod def update_infra( self, project: str, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, @@ -50,12 +65,21 @@ def update_infra( """ ... + def plan_infra( + self, config: RepoConfig, desired_registry_proto: RegistryProto + ) -> Infra: + """ + Returns the Infra required to support the desired registry. + + Args: + config: The RepoConfig for the current FeatureStore. + desired_registry_proto: The desired registry, in proto form. + """ + return Infra() + @abc.abstractmethod def teardown_infra( - self, - project: str, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], + self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], ): """ Tear down all cloud resources for a repo. @@ -71,7 +95,7 @@ def teardown_infra( def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -85,7 +109,7 @@ def online_write_batch( Args: config: The RepoConfig for the current FeatureStore. - table: Feast FeatureTable + table: Feast FeatureView data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, a dict containing feature values, an event timestamp for the row, and the created timestamp for the row if it exists. @@ -94,6 +118,14 @@ def online_write_batch( """ ... + def ingest_df( + self, feature_view: FeatureView, entities: List[Entity], df: pandas.DataFrame, + ): + """ + Ingests a DataFrame directly into the online store + """ + pass + @abc.abstractmethod def materialize_single_feature_view( self, @@ -124,7 +156,7 @@ def get_historical_features( def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: List[str] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: @@ -139,40 +171,56 @@ def online_read( """ ... + @abc.abstractmethod + def retrieve_saved_dataset( + self, config: RepoConfig, dataset: SavedDataset + ) -> RetrievalJob: + """ + Read saved dataset from offline store. + All parameters for retrieval (like path, datetime boundaries, column names for both keys and features, etc) + are determined from SavedDataset object. -def get_provider(config: RepoConfig, repo_path: Path) -> Provider: - if "." not in config.provider: - if config.provider == "gcp": - from feast.infra.gcp import GcpProvider + Returns: + RetrievalJob object, which is lazy wrapper for actual query performed under the hood. + + """ + ... - return GcpProvider(config) - elif config.provider == "aws": - from feast.infra.aws import AwsProvider + def get_feature_server_endpoint(self) -> Optional[str]: + """Returns endpoint for the feature server, if it exists.""" + return None - return AwsProvider(config) - elif config.provider == "local": - from feast.infra.local import LocalProvider - return LocalProvider(config) - else: +def get_provider(config: RepoConfig, repo_path: Path) -> Provider: + if "." 
not in config.provider: + if config.provider not in PROVIDERS_CLASS_FOR_TYPE: raise errors.FeastProviderNotImplementedError(config.provider) + + provider = PROVIDERS_CLASS_FOR_TYPE[config.provider] else: - # Split provider into module and class names by finding the right-most dot. - # For example, provider 'foo.bar.MyProvider' will be parsed into 'foo.bar' and 'MyProvider' - module_name, class_name = config.provider.rsplit(".", 1) + provider = config.provider + + # Split provider into module and class names by finding the right-most dot. + # For example, provider 'foo.bar.MyProvider' will be parsed into 'foo.bar' and 'MyProvider' + module_name, class_name = provider.rsplit(".", 1) - cls = importer.get_class_from_type(module_name, class_name, "Provider") + cls = import_class(module_name, class_name, "Provider") - return cls(config, repo_path) + return cls(config) def _get_requested_feature_views_to_features_dict( - feature_refs: List[str], feature_views: List[FeatureView] -) -> Dict[FeatureView, List[str]]: + feature_refs: List[str], + feature_views: List[FeatureView], + on_demand_feature_views: List[OnDemandFeatureView], +) -> Tuple[Dict[FeatureView, List[str]], Dict[OnDemandFeatureView, List[str]]]: """Create a dict of FeatureView -> List[Feature] for all requested features. Set full_feature_names to True to have feature names prefixed by their feature view name.""" - feature_views_to_feature_map: Dict[FeatureView, List[str]] = {} + feature_views_to_feature_map: Dict[FeatureView, List[str]] = defaultdict(list) + on_demand_feature_views_to_feature_map: Dict[ + OnDemandFeatureView, List[str] + ] = defaultdict(list) for ref in feature_refs: ref_parts = ref.split(":") @@ -180,22 +228,19 @@ def _get_requested_feature_views_to_features_dict( feature_from_ref = ref_parts[1] found = False - for feature_view_from_registry in feature_views: - if feature_view_from_registry.name == feature_view_from_ref: + for fv in feature_views: + if fv.projection.name_to_use() == feature_view_from_ref: found = True - if feature_view_from_registry in feature_views_to_feature_map: - feature_views_to_feature_map[feature_view_from_registry].append( - feature_from_ref - ) - else: - feature_views_to_feature_map[feature_view_from_registry] = [ - feature_from_ref - ] + feature_views_to_feature_map[fv].append(feature_from_ref) + for odfv in on_demand_feature_views: + if odfv.projection.name_to_use() == feature_view_from_ref: + found = True + on_demand_feature_views_to_feature_map[odfv].append(feature_from_ref) if not found: raise ValueError(f"Could not find feature view from reference {ref}") - return feature_views_to_feature_map + return feature_views_to_feature_map, on_demand_feature_views_to_feature_map def _get_column_names( @@ -213,18 +258,20 @@ def _get_column_names( the query to the offline store. 
""" # if we have mapped fields, use the original field names in the call to the offline store - event_timestamp_column = feature_view.input.event_timestamp_column + timestamp_field = feature_view.batch_source.timestamp_field feature_names = [feature.name for feature in feature_view.features] - created_timestamp_column = feature_view.input.created_timestamp_column - join_keys = [entity.join_key for entity in entities] - if feature_view.input.field_mapping is not None: + created_timestamp_column = feature_view.batch_source.created_timestamp_column + join_keys = [ + entity.join_key for entity in entities if entity.join_key != DUMMY_ENTITY_ID + ] + if feature_view.batch_source.field_mapping is not None: reverse_field_mapping = { - v: k for k, v in feature_view.input.field_mapping.items() + v: k for k, v in feature_view.batch_source.field_mapping.items() } - event_timestamp_column = ( - reverse_field_mapping[event_timestamp_column] - if event_timestamp_column in reverse_field_mapping.keys() - else event_timestamp_column + timestamp_field = ( + reverse_field_mapping[timestamp_field] + if timestamp_field in reverse_field_mapping.keys() + else timestamp_field ) created_timestamp_column = ( reverse_field_mapping[created_timestamp_column] @@ -240,10 +287,20 @@ def _get_column_names( reverse_field_mapping[col] if col in reverse_field_mapping.keys() else col for col in feature_names ] + + # We need to exclude join keys and timestamp columns from the list of features, after they are mapped to + # their final column names via the `field_mapping` field of the source. + feature_names = [ + name + for name in feature_names + if name not in join_keys + and name != timestamp_field + and name != created_timestamp_column + ] return ( join_keys, feature_names, - event_timestamp_column, + timestamp_field, created_timestamp_column, ) @@ -260,53 +317,88 @@ def _run_field_mapping( return table +def _run_dask_field_mapping( + table: dd.DataFrame, field_mapping: Dict[str, str], +): + if field_mapping: + # run field mapping in the forward direction + table = table.rename(columns=field_mapping) + table = table.persist() + + return table + + +def _coerce_datetime(ts): + """ + Depending on underlying time resolution, arrow to_pydict() sometimes returns pandas + timestamp type (for nanosecond resolution), and sometimes you get standard python datetime + (for microsecond resolution). + While pandas timestamp class is a subclass of python datetime, it doesn't always behave the + same way. We convert it to normal datetime so that consumers downstream don't have to deal + with these quirks. + """ + if isinstance(ts, pandas.Timestamp): + return ts.to_pydatetime() + else: + return ts + + def _convert_arrow_to_proto( - table: pyarrow.Table, feature_view: FeatureView, join_keys: List[str], + table: Union[pyarrow.Table, pyarrow.RecordBatch], + feature_view: FeatureView, + join_keys: Dict[str, ValueType], ) -> List[Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]]: - rows_to_write = [] + # Avoid ChunkedArrays which guarentees `zero_copy_only` availiable. + if isinstance(table, pyarrow.Table): + table = table.to_batches()[0] - def _coerce_datetime(ts): - """ - Depending on underlying time resolution, arrow to_pydict() sometimes returns pandas - timestamp type (for nanosecond resolution), and sometimes you get standard python datetime - (for microsecond resolution). 
+ columns = [ + (field.name, field.dtype.to_value_type()) for field in feature_view.schema + ] + list(join_keys.items()) - While pandas timestamp class is a subclass of python datetime, it doesn't always behave the - same way. We convert it to normal datetime so that consumers downstream don't have to deal - with these quirks. - """ + proto_values_by_column = { + column: python_values_to_proto_values( + table.column(column).to_numpy(zero_copy_only=False), value_type + ) + for column, value_type in columns + } - if isinstance(ts, pandas.Timestamp): - return ts.to_pydatetime() - else: - return ts - - for row in zip(*table.to_pydict().values()): - entity_key = EntityKeyProto() - for join_key in join_keys: - entity_key.join_keys.append(join_key) - idx = table.column_names.index(join_key) - value = python_value_to_proto_value(row[idx]) - entity_key.entity_values.append(value) - feature_dict = {} - for feature in feature_view.features: - idx = table.column_names.index(feature.name) - value = python_value_to_proto_value(row[idx], feature.dtype) - feature_dict[feature.name] = value - event_timestamp_idx = table.column_names.index( - feature_view.input.event_timestamp_column + entity_keys = [ + EntityKeyProto( + join_keys=join_keys, + entity_values=[proto_values_by_column[k][idx] for k in join_keys], ) - event_timestamp = _coerce_datetime(row[event_timestamp_idx]) + for idx in range(table.num_rows) + ] - if feature_view.input.created_timestamp_column: - created_timestamp_idx = table.column_names.index( - feature_view.input.created_timestamp_column + # Serialize the features per row + feature_dict = { + feature.name: proto_values_by_column[feature.name] + for feature in feature_view.features + } + features = [dict(zip(feature_dict, vars)) for vars in zip(*feature_dict.values())] + + # Convert event_timestamps + event_timestamps = [ + _coerce_datetime(val) + for val in pandas.to_datetime( + table.column(feature_view.batch_source.timestamp_field).to_numpy( + zero_copy_only=False ) - created_timestamp = _coerce_datetime(row[created_timestamp_idx]) - else: - created_timestamp = None - - rows_to_write.append( - (entity_key, feature_dict, event_timestamp, created_timestamp) ) - return rows_to_write + ] + + # Convert created_timestamps if they exist + if feature_view.batch_source.created_timestamp_column: + created_timestamps = [ + _coerce_datetime(val) + for val in pandas.to_datetime( + table.column( + feature_view.batch_source.created_timestamp_column + ).to_numpy(zero_copy_only=False) + ) + ] + else: + created_timestamps = [None] * table.num_rows + + return list(zip(entity_keys, features, event_timestamps, created_timestamps)) diff --git a/sdk/python/feast/infra/transformation_servers/Dockerfile b/sdk/python/feast/infra/transformation_servers/Dockerfile new file mode 100644 index 0000000000..79997ce01b --- /dev/null +++ b/sdk/python/feast/infra/transformation_servers/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.7-slim + +# Copy app handler code +COPY sdk/python/feast/infra/transformation_servers/app.py app.py + +# Copy necessary parts of the Feast codebase +COPY sdk/python sdk/python +COPY protos protos +COPY README.md README.md + +# Install dependencies +RUN pip3 install -e 'sdk/python' + +# Start feature transformation server +CMD [ "python", "app.py" ] diff --git a/sdk/python/feast/infra/transformation_servers/app.py b/sdk/python/feast/infra/transformation_servers/app.py new file mode 100644 index 0000000000..acfb0959ba --- /dev/null +++ b/sdk/python/feast/infra/transformation_servers/app.py @@ -0,0 
+1,63 @@ +import base64 +import os +import tempfile +import threading +from pathlib import Path + +import yaml + +from feast import FeatureStore +from feast.constants import ( + DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT, + FEATURE_STORE_YAML_ENV_NAME, + FEATURE_TRANSFORMATION_SERVER_PORT_ENV_NAME, + REGISTRY_ENV_NAME, +) +from feast.infra.local import LocalRegistryStore +from feast.registry import get_registry_store_class_from_scheme + +# Load RepoConfig +config_base64 = os.environ[FEATURE_STORE_YAML_ENV_NAME] +config_bytes = base64.b64decode(config_base64) + +# Create a new unique directory for writing feature_store.yaml +repo_path = Path(tempfile.mkdtemp()) + +with open(repo_path / "feature_store.yaml", "wb") as f: + f.write(config_bytes) + +# Write registry contents for local registries +config_string = config_bytes.decode("utf-8") +raw_config = yaml.safe_load(config_string) +registry = raw_config["registry"] +registry_path = registry["path"] if isinstance(registry, dict) else registry +registry_store_class = get_registry_store_class_from_scheme(registry_path) +if registry_store_class == LocalRegistryStore and not os.path.exists(registry_path): + registry_base64 = os.environ[REGISTRY_ENV_NAME] + registry_bytes = base64.b64decode(registry_base64) + registry_dir = os.path.dirname(registry_path) + if not os.path.exists(repo_path / registry_dir): + os.makedirs(repo_path / registry_dir) + with open(repo_path / registry_path, "wb") as f: + f.write(registry_bytes) + +# Initialize the feature store +store = FeatureStore(repo_path=str(repo_path.resolve())) + +if isinstance(registry, dict) and registry.get("cache_ttl_seconds", 0) > 0: + # disable synchronous refresh + store.config.registry.cache_ttl_seconds = 0 + + # enable asynchronous refresh + def async_refresh(): + store.refresh_registry() + threading.Timer(registry["cache_ttl_seconds"], async_refresh).start() + + async_refresh() + +# Start the feature transformation server +port = ( + os.environ.get(FEATURE_TRANSFORMATION_SERVER_PORT_ENV_NAME) + or DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT +) +store.serve_transformations(port) diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index 235f427b76..fe5eed774e 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -1,26 +1,39 @@ +import contextlib import os import tempfile import uuid -from typing import Tuple +from typing import Any, Dict, Iterator, Optional, Tuple import pandas as pd import pyarrow as pa import pyarrow.parquet as pq -from tenacity import retry, retry_if_exception_type, wait_exponential +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) -from feast.errors import RedshiftCredentialsError, RedshiftQueryError +from feast.errors import ( + RedshiftCredentialsError, + RedshiftQueryError, + RedshiftTableNameTooLong, +) from feast.type_map import pa_to_redshift_value_type try: import boto3 from botocore.config import Config - from botocore.exceptions import ClientError + from botocore.exceptions import ClientError, ConnectionClosedError except ImportError as e: from feast.errors import FeastExtrasDependencyImportError raise FeastExtrasDependencyImportError("aws", str(e)) +REDSHIFT_TABLE_NAME_MAX_LENGTH = 127 + + def get_redshift_data_client(aws_region: str): """ Get the Redshift Data API Service client for the given AWS region. 
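The transformation server above bootstraps entirely from environment variables. As a hedged sketch, a deployer could produce the payload read from the variable named by FEATURE_STORE_YAML_ENV_NAME like this (the yaml contents are illustrative):

    import base64

    yaml_text = "project: my_project\nregistry: data/registry.db\nprovider: local\n"
    payload = base64.b64encode(yaml_text.encode("utf-8")).decode("ascii")
    # The server-side inverse, as in app.py above:
    assert base64.b64decode(payload).decode("utf-8") == yaml_text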
@@ -49,6 +62,12 @@ def get_bucket_and_key(s3_path: str) -> Tuple[str, str]: return bucket, key +@retry( + wait=wait_exponential(multiplier=1, max=4), + retry=retry_if_exception_type(ConnectionClosedError), + stop=stop_after_attempt(5), + reraise=True, +) def execute_redshift_statement_async( redshift_data_client, cluster_id: str, database: str, user: str, query: str ) -> dict: @@ -81,8 +100,9 @@ class RedshiftStatementNotFinishedError(Exception): @retry( - wait=wait_exponential(multiplier=0.1, max=30), + wait=wait_exponential(multiplier=1, max=30), retry=retry_if_exception_type(RedshiftStatementNotFinishedError), + reraise=True, ) def wait_for_redshift_statement(redshift_data_client, statement: dict) -> None: """Waits for the Redshift statement to finish. Raises RedshiftQueryError if the statement didn't succeed. @@ -131,10 +151,34 @@ def execute_redshift_statement( def get_redshift_statement_result(redshift_data_client, statement_id: str) -> dict: - """ Get the Redshift statement result """ + """Get the Redshift statement result""" return redshift_data_client.get_statement_result(Id=statement_id) +def upload_df_to_s3(s3_resource, s3_path: str, df: pd.DataFrame,) -> None: + """Uploads a Pandas DataFrame to S3 as a parquet file + + Args: + s3_resource: S3 Resource object + s3_path: S3 path where the Parquet file is temporarily uploaded + df: The Pandas DataFrame to upload + + Returns: None + + """ + bucket, key = get_bucket_and_key(s3_path) + + # Drop the index so that we don't have unnecessary columns + df.reset_index(drop=True, inplace=True) + + table = pa.Table.from_pandas(df) + # Write the PyArrow Table on disk in Parquet format and upload it to S3 + with tempfile.TemporaryDirectory() as temp_dir: + file_path = f"{temp_dir}/{uuid.uuid4()}.parquet" + pq.write_table(table, file_path) + s3_resource.Object(bucket, key).put(Body=open(file_path, "rb")) + + def upload_df_to_redshift( redshift_data_client, cluster_id: str, @@ -145,7 +189,7 @@ def upload_df_to_redshift( iam_role: str, table_name: str, df: pd.DataFrame, -) -> None: +): """Uploads a Pandas DataFrame to Redshift as a new table. The caller is responsible for deleting the table when no longer necessary. @@ -169,17 +213,28 @@ def upload_df_to_redshift( table_name: The name of the new Redshift table where we copy the dataframe df: The Pandas DataFrame to upload - Returns: None - + Raises: + RedshiftTableNameTooLong: The specified table name is too long. """ + if len(table_name) > REDSHIFT_TABLE_NAME_MAX_LENGTH: + raise RedshiftTableNameTooLong(table_name) + bucket, key = get_bucket_and_key(s3_path) - # Convert Pandas DataFrame into PyArrow table and compile the Redshift table schema + # Drop the index so that we don't have unnecessary columns + df.reset_index(drop=True, inplace=True) + + # Convert Pandas DataFrame into PyArrow table and compile the Redshift table schema. + # Note, if the underlying data has missing values, + # pandas will convert those values to np.nan if the dtypes are numerical (floats, ints, etc.) or boolean. + # If the dtype is 'object', then missing values are inferred as python `None`s. 
+ # More details at: + # https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html#values-considered-missing table = pa.Table.from_pandas(df) column_names, column_types = [], [] for field in table.schema: column_names.append(field.name) - column_types.append(pa_to_redshift_value_type(str(field.type))) + column_types.append(pa_to_redshift_value_type(field.type)) column_query_list = ", ".join( [ f"{column_name} {column_type}" @@ -207,8 +262,51 @@ def upload_df_to_redshift( s3_resource.Object(bucket, key).delete() +@contextlib.contextmanager +def temporarily_upload_df_to_redshift( + redshift_data_client, + cluster_id: str, + database: str, + user: str, + s3_resource, + s3_path: str, + iam_role: str, + table_name: str, + df: pd.DataFrame, +) -> Iterator[None]: + """Uploads a Pandas DataFrame to Redshift as a new table with cleanup logic. + + This is essentially the same as upload_df_to_redshift (check out its docstring for full details), + but unlike that function, this one is a context manager and should be used in a `with` block. For example: + + >>> with temporarily_upload_df_to_redshift(...): # doctest: +SKIP + >>> # Use `table_name` table in Redshift here + >>> # `table_name` will not exist at this point, since it's cleaned up by the `with` block + + """ + # Upload the dataframe to Redshift + upload_df_to_redshift( + redshift_data_client, + cluster_id, + database, + user, + s3_resource, + s3_path, + iam_role, + table_name, + df, + ) + + yield + + # Clean up the uploaded Redshift table + execute_redshift_statement( + redshift_data_client, cluster_id, database, user, f"DROP TABLE {table_name}", + ) + + def download_s3_directory(s3_resource, bucket: str, key: str, local_dir: str): - """ Download the S3 directory to a local disk """ + """Download the S3 directory to a local disk""" bucket_obj = s3_resource.Bucket(bucket) if key != "" and not key.endswith("/"): key = key + "/" @@ -220,7 +318,7 @@ def download_s3_directory(s3_resource, bucket: str, key: str, local_dir: str): def delete_s3_directory(s3_resource, bucket: str, key: str): - """ Delete S3 directory recursively """ + """Delete S3 directory recursively""" bucket_obj = s3_resource.Bucket(bucket) if key != "" and not key.endswith("/"): key = key + "/" @@ -237,16 +335,24 @@ def execute_redshift_query_and_unload_to_s3( iam_role: str, query: str, ) -> None: - """ Unload Redshift Query results to S3 """ + """Unload Redshift Query results to S3 + + Args: + redshift_data_client: Redshift Data API Service client + cluster_id: Redshift Cluster Identifier + database: Redshift Database Name + user: Redshift username + s3_path: S3 directory where the unloaded data is written + iam_role: IAM Role for Redshift to assume during the UNLOAD command. + The role must grant permission to write to the S3 location. 
+ query: The SQL query to execute + + """ # Run the query, unload the results to S3 unique_table_name = "_" + str(uuid.uuid4()).replace("-", "") - unload_query = f""" - CREATE TEMPORARY TABLE {unique_table_name} AS ({query}); - UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' PARQUET - """ - execute_redshift_statement( - redshift_data_client, cluster_id, database, user, unload_query - ) + query = f"CREATE TEMPORARY TABLE {unique_table_name} AS ({query});\n" + query += f"UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' PARQUET" + execute_redshift_statement(redshift_data_client, cluster_id, database, user, query) def unload_redshift_query_to_pa( @@ -259,11 +365,11 @@ def unload_redshift_query_to_pa( iam_role: str, query: str, ) -> pa.Table: - """ Unload Redshift Query results to S3 and get the results in PyArrow Table format """ + """Unload Redshift Query results to S3 and get the results in PyArrow Table format""" bucket, key = get_bucket_and_key(s3_path) execute_redshift_query_and_unload_to_s3( - redshift_data_client, cluster_id, database, user, s3_path, iam_role, query + redshift_data_client, cluster_id, database, user, s3_path, iam_role, query, ) with tempfile.TemporaryDirectory() as temp_dir: @@ -282,7 +388,7 @@ def unload_redshift_query_to_df( iam_role: str, query: str, ) -> pd.DataFrame: - """ Unload Redshift Query results to S3 and get the results in Pandas DataFrame format """ + """Unload Redshift Query results to S3 and get the results in Pandas DataFrame format""" table = unload_redshift_query_to_pa( redshift_data_client, cluster_id, @@ -294,3 +400,113 @@ def unload_redshift_query_to_df( query, ) return table.to_pandas() + + +def get_lambda_function(lambda_client, function_name: str) -> Optional[Dict]: + """ + Get the AWS Lambda function by name or return None if it doesn't exist. + Args: + lambda_client: AWS Lambda client. + function_name: Name of the AWS Lambda function. + + Returns: Either a dictionary containing the get_function API response, or None if it doesn't exist. + + """ + try: + return lambda_client.get_function(FunctionName=function_name)["Configuration"] + except ClientError as ce: + # If the resource could not be found, return None. + # Otherwise bubble up the exception (most likely permission errors) + if ce.response["Error"]["Code"] == "ResourceNotFoundException": + return None + else: + raise + + +def delete_lambda_function(lambda_client, function_name: str) -> Dict: + """ + Delete the AWS Lambda function by name. + Args: + lambda_client: AWS Lambda client. + function_name: Name of the AWS Lambda function. + + Returns: The delete_function API response dict + + """ + return lambda_client.delete_function(FunctionName=function_name) + + +@retry( + wait=wait_exponential(multiplier=1, max=4), + retry=retry_if_exception_type(ClientError), + stop=stop_after_attempt(5), + reraise=True, +) +def update_lambda_function_environment( + lambda_client, function_name: str, environment: Dict[str, Any] +) -> None: + """ + Update AWS Lambda function environment. The function is retried multiple times in case another action is + currently being run on the lambda (e.g. it's being created or being updated in parallel). + Args: + lambda_client: AWS Lambda client. + function_name: Name of the AWS Lambda function. + environment: The desired lambda environment. 
+ + """ + lambda_client.update_function_configuration( + FunctionName=function_name, Environment=environment + ) + + +def get_first_api_gateway(api_gateway_client, api_gateway_name: str) -> Optional[Dict]: + """ + Get the first API Gateway with the given name. Note that API Gateways can have the same name. + They are identified by an AWS-generated ID, which is unique. Therefore this method lists all API + Gateways and returns the first one with a matching name. If no matching name is found, None is returned. + Args: + api_gateway_client: API Gateway V2 Client. + api_gateway_name: Name of the API Gateway function. + + Returns: Either a dictionary containing the get_api response, or None if it doesn't exist + + """ + response = api_gateway_client.get_apis() + apis = response.get("Items", []) + + # Limit the number of times we page through the API. + for _ in range(10): + # Try finding the match before getting the next batch of api gateways from AWS + for api in apis: + if api.get("Name") == api_gateway_name: + return api + + # Break out of the loop if there's no next batch of api gateways + next_token = response.get("NextToken") + if not next_token: + break + + # Get the next batch of api gateways using next_token + response = api_gateway_client.get_apis(NextToken=next_token) + apis = response.get("Items", []) + + # Return None if no API Gateway with such a name was found + return None + + +def delete_api_gateway(api_gateway_client, api_gateway_id: str) -> Dict: + """ + Delete the API Gateway with the given ID. + Args: + api_gateway_client: API Gateway V2 Client. + api_gateway_id: API Gateway ID to delete. + + Returns: The delete_api API response dict. + + """ + return api_gateway_client.delete_api(ApiId=api_gateway_id) + + +def get_account_id() -> str: + """Get AWS Account ID""" + return boto3.client("sts").get_caller_identity().get("Account") diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake_utils.py new file mode 100644 index 0000000000..a467a9de42 --- /dev/null +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -0,0 +1,316 @@ +import configparser +import os +import random +import string +from logging import getLogger +from tempfile import TemporaryDirectory +from typing import Any, Dict, Iterator, List, Optional, Tuple, cast + +import pandas as pd +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from feast.errors import SnowflakeIncompleteConfig, SnowflakeQueryUnknownError + +try: + import snowflake.connector + from snowflake.connector import ProgrammingError, SnowflakeConnection + from snowflake.connector.cursor import SnowflakeCursor +except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("snowflake", str(e)) + + +getLogger("snowflake.connector.cursor").disabled = True +getLogger("snowflake.connector.connection").disabled = True +getLogger("snowflake.connector.network").disabled = True +logger = getLogger(__name__) + + +def execute_snowflake_statement(conn: SnowflakeConnection, query) -> SnowflakeCursor: + cursor = conn.cursor().execute(query) + if cursor is None: + raise SnowflakeQueryUnknownError(query) + return cursor + + +def get_snowflake_conn(config, autocommit=True) -> SnowflakeConnection: + assert config.type == "snowflake.offline" + config_header = "connections.feast_offline_store" + + 
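get_snowflake_conn (continued just below) layers connection parameters: the connections.feast_offline_store section of the config file is the base, and non-None values from the repo config override it. A toy illustration of that precedence, with made-up values:

    file_section = {"account": "acct_from_file", "warehouse": "WH_FILE"}
    repo_config = {"warehouse": "WH_YAML", "role": None}  # None means "not set"
    kwargs = dict(file_section)
    kwargs.update((k, v) for k, v in repo_config.items() if v is not None)
    assert kwargs == {"account": "acct_from_file", "warehouse": "WH_YAML"}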
config_dict = dict(config) + + # read config file + config_reader = configparser.ConfigParser() + config_reader.read([config_dict["config_path"]]) + kwargs: Dict[str, Any] = {} + if config_reader.has_section(config_header): + kwargs = dict(config_reader[config_header]) + + if "schema" in kwargs: + kwargs["schema_"] = kwargs.pop("schema") + + kwargs.update((k, v) for k, v in config_dict.items() if v is not None) + + for k, v in kwargs.items(): + if k in ["role", "warehouse", "database", "schema_"]: + kwargs[k] = f'"{v}"' + + if "schema_" in kwargs: + kwargs["schema"] = kwargs.pop("schema_") + else: + kwargs["schema"] = '"PUBLIC"' + + # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication-key-pair-rotation + # https://docs.snowflake.com/en/user-guide/key-pair-auth.html#configuring-key-pair-authentication + if "private_key" in kwargs: + kwargs["private_key"] = parse_private_key_path( + kwargs["private_key"], kwargs["private_key_passphrase"] + ) + + try: + conn = snowflake.connector.connect( + application="feast", autocommit=autocommit, **kwargs + ) + + return conn + except KeyError as e: + raise SnowflakeIncompleteConfig(e) + + +# TO DO -- sfc-gh-madkins +# Remove dependency on write_pandas function by falling back to native snowflake python connector +# Current issue is datetime[ns] types are read incorrectly in Snowflake, need to coerce to datetime[ns, UTC] +def write_pandas( + conn: SnowflakeConnection, + df: pd.DataFrame, + table_name: str, + database: Optional[str] = None, + schema: Optional[str] = None, + chunk_size: Optional[int] = None, + compression: str = "gzip", + on_error: str = "abort_statement", + parallel: int = 4, + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +): + """Allows users to most efficiently write back a pandas DataFrame to Snowflake. + + It works by dumping the DataFrame into Parquet files, uploading them and finally copying their data into the table. + + Returns whether all files were ingested correctly, number of chunks uploaded, and number of rows ingested + with all of the COPY INTO command's output for debugging purposes. + + Example usage: + import pandas + from snowflake.connector.pandas_tools import write_pandas + + df = pandas.DataFrame([('Mark', 10), ('Luke', 20)], columns=['name', 'balance']) + success, nchunks, nrows, _ = write_pandas(cnx, df, 'customers') + + Args: + conn: Connection to be used to communicate with Snowflake. + df: Dataframe we'd like to write back. + table_name: Table name where we want to insert into. + database: Database schema and table is in, if not provided the default one will be used (Default value = None). + schema: Schema table is in, if not provided the default one will be used (Default value = None). + chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once + (Default value = None). + compression: The compression used on the Parquet files, can only be gzip or snappy. Gzip supposedly gives + better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). + on_error: Action to take when COPY INTO statements fail, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#copy-options-copyoptions + (Default value = 'abort_statement'). 
+ parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). + quote_identifiers: By default, identifiers, specifically database, schema, table and column names + (from df.columns) will be quoted. If set to False, identifiers are passed on to Snowflake without quoting. + I.e. identifiers will be coerced to uppercase by Snowflake. (Default value = True) + auto_create_table: When true, will automatically create a table with corresponding columns for each column in + the passed in DataFrame. The table will not be created if it already exists. + create_temp_table: Will make the auto-created table a temporary table. + """ + if database is not None and schema is None: + raise ProgrammingError( + "Schema has to be provided to write_pandas when a database is provided" + ) + # This dictionary maps the compression algorithm to Snowflake put copy into command type + # https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#type-parquet + compression_map = {"gzip": "auto", "snappy": "snappy"} + if compression not in compression_map.keys(): + raise ProgrammingError( + "Invalid compression '{}', only acceptable values are: {}".format( + compression, compression_map.keys() + ) + ) + if quote_identifiers: + location = ( + (('"' + database + '".') if database else "") + + (('"' + schema + '".') if schema else "") + + ('"' + table_name + '"') + ) + else: + location = ( + (database + "." if database else "") + + (schema + "." if schema else "") + + (table_name) + ) + if chunk_size is None: + chunk_size = len(df) + cursor: SnowflakeCursor = conn.cursor() + stage_name = create_temporary_sfc_stage(cursor) + + with TemporaryDirectory() as tmp_folder: + for i, chunk in chunk_helper(df, chunk_size): + chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) + # Dump chunk into parquet file + chunk.to_parquet( + chunk_path, + compression=compression, + use_deprecated_int96_timestamps=True, + ) + # Upload parquet file + upload_sql = ( + "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + # Remove chunk file + os.remove(chunk_path) + if quote_identifiers: + columns = '"' + '","'.join(list(df.columns)) + '"' + else: + columns = ",".join(list(df.columns)) + + if auto_create_table: + file_format_name = create_file_format(compression, compression_map, cursor) + infer_schema_sql = f"SELECT COLUMN_NAME, TYPE FROM table(infer_schema(location=>'@\"{stage_name}\"', file_format=>'{file_format_name}'))" + logger.debug(f"inferring schema with '{infer_schema_sql}'") + result_cursor = cursor.execute(infer_schema_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(infer_schema_sql) + result = cast(List[Tuple[str, str]], result_cursor.fetchall()) + column_type_mapping: Dict[str, str] = dict(result) + # Infer schema can return the columns out of order depending on the chunking we do when uploading + # so we have to iterate through the dataframe columns to make sure we create the table with its + # columns in order + quote = '"' if quote_identifiers else "" + create_table_columns = ", ".join( + [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in 
df.columns] + ) + create_table_sql = ( + f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " + f"({create_table_columns})" + f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + ) + logger.debug(f"auto creating table with '{create_table_sql}'") + cursor.execute(create_table_sql, _is_internal=True) + drop_file_format_sql = f"DROP FILE FORMAT IF EXISTS {file_format_name}" + logger.debug(f"dropping file format with '{drop_file_format_sql}'") + cursor.execute(drop_file_format_sql, _is_internal=True) + + # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly + # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) + if quote_identifiers: + parquet_columns = "$1:" + ",$1:".join(f'"{c}"' for c in df.columns) + else: + parquet_columns = "$1:" + ",$1:".join(df.columns) + copy_into_sql = ( + "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "({columns}) " + 'FROM (SELECT {parquet_columns} FROM @"{stage_name}") ' + "FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression}) " + "PURGE=TRUE ON_ERROR={on_error}" + ).format( + location=location, + columns=columns, + parquet_columns=parquet_columns, + stage_name=stage_name, + compression=compression_map[compression], + on_error=on_error, + ) + logger.debug("copying into with '{}'".format(copy_into_sql)) + # Snowflake returns the original cursor if the query execution succeeded. + result_cursor = cursor.execute(copy_into_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(copy_into_sql) + result_cursor.close() + + +@retry( + wait=wait_exponential(multiplier=1, max=4), + retry=retry_if_exception_type(ProgrammingError), + stop=stop_after_attempt(5), + reraise=True, +) +def create_file_format( + compression: str, compression_map: Dict[str, str], cursor: SnowflakeCursor +) -> str: + file_format_name = ( + '"' + "".join(random.choice(string.ascii_lowercase) for _ in range(5)) + '"' + ) + file_format_sql = ( + f"CREATE FILE FORMAT {file_format_name} " + f"/* Python:snowflake.connector.pandas_tools.write_pandas() */ " + f"TYPE=PARQUET COMPRESSION={compression_map[compression]}" + ) + logger.debug(f"creating file format with '{file_format_sql}'") + cursor.execute(file_format_sql, _is_internal=True) + return file_format_name + + +@retry( + wait=wait_exponential(multiplier=1, max=4), + retry=retry_if_exception_type(ProgrammingError), + stop=stop_after_attempt(5), + reraise=True, +) +def create_temporary_sfc_stage(cursor: SnowflakeCursor) -> str: + stage_name = "".join(random.choice(string.ascii_lowercase) for _ in range(5)) + create_stage_sql = ( + "create temporary stage /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + '"{stage_name}"' + ).format(stage_name=stage_name) + logger.debug(f"creating stage with '{create_stage_sql}'") + result_cursor = cursor.execute(create_stage_sql, _is_internal=True) + if result_cursor is None: + raise SnowflakeQueryUnknownError(create_stage_sql) + result_cursor.fetchall() + return stage_name + + +def chunk_helper(lst: pd.DataFrame, n: int) -> Iterator[Tuple[int, pd.DataFrame]]: + """Helper generator that chunks a DataFrame, yielding each chunk together with its index (as if enumerate were called on the sequence of chunks).""" + for i in range(0, len(lst), n): + yield int(i / n), lst[i : i + n] + + +def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: + + with open(key_path, "rb") as key: + p_key = serialization.load_pem_private_key( + 
key.read(), + password=private_key_passphrase.encode(), + backend=default_backend(), + ) + + pkb = p_key.private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + return pkb diff --git a/sdk/python/feast/loaders/abstract_producer.py b/sdk/python/feast/loaders/abstract_producer.py deleted file mode 100644 index 14d9bc42b7..0000000000 --- a/sdk/python/feast/loaders/abstract_producer.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright 2019 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Optional, Union - -from tqdm import tqdm - - -class AbstractProducer: - """ - Abstract class for Kafka producers - """ - - def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): - self.brokers = brokers - self.row_count = row_count - - # Progress bar will always display average rate - self.pbar = tqdm( - total=row_count, unit="rows", smoothing=0, disable=disable_progress_bar - ) - - def produce(self, topic: str, data: bytes): - message = "{} should implement a produce method".format(self.__class__.__name__) - raise NotImplementedError(message) - - def flush(self, timeout: int): - message = "{} should implement a flush method".format(self.__class__.__name__) - raise NotImplementedError(message) - - def _inc_pbar(self, meta): - self.pbar.update(1) - - def _set_error(self, exception: str): - raise Exception(exception) - - def print_results(self) -> None: - """ - Print ingestion statistics. - - Returns: - None: None - """ - # Refresh and close tqdm progress bar - self.pbar.refresh() - - self.pbar.close() - - print("Ingestion complete!") - - print(f"\nIngestion statistics:" f"\nSuccess: {self.pbar.n}/{self.row_count}") - return None - - -class ConfluentProducer(AbstractProducer): - """ - Concrete implementation of Confluent Kafka producer (confluent-kafka) - """ - - def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): - from confluent_kafka import Producer - - self.producer = Producer({"bootstrap.servers": brokers}) - super().__init__(brokers, row_count, disable_progress_bar) - - def produce(self, topic: str, value: bytes) -> None: - """ - Generic produce that implements confluent-kafka's produce method to - push a byte encoded object into a Kafka topic. - - Args: - topic (str): Kafka topic. - value (bytes): Byte encoded object. - - Returns: - None: None. - """ - - try: - self.producer.produce(topic, value=value, callback=self._delivery_callback) - # Serve delivery callback queue. - # NOTE: Since produce() is an asynchronous API this poll() call - # will most likely not serve the delivery callback for the - # last produce()d message. - self.producer.poll(0) - except Exception as ex: - self._set_error(str(ex)) - - return None - - def flush(self, timeout: Optional[int]): - """ - Generic flush that implements confluent-kafka's flush method. - - Args: - timeout (Optional[int]): Timeout in seconds to wait for completion. 
- - Returns: - int: Number of messages still in queue. - """ - messages = self.producer.flush(timeout=timeout) - if messages: - raise Exception("Not all Kafka messages are successfully delivered.") - return messages - - def _delivery_callback(self, err: str, msg) -> None: - """ - Optional per-message delivery callback (triggered by poll() or flush()) - when a message has been successfully delivered or permanently failed - delivery (after retries). - - Although the msg argument is not used, the current method signature is - required as specified in the confluent-kafka documentation. - - Args: - err (str): Error message. - msg (): Kafka message. - - Returns: - None - """ - if err: - self._set_error(err) - else: - self._inc_pbar(None) - - -class KafkaPythonProducer(AbstractProducer): - """ - Concrete implementation of Python Kafka producer (kafka-python) - """ - - def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): - from kafka import KafkaProducer - - self.producer = KafkaProducer(bootstrap_servers=[brokers]) - super().__init__(brokers, row_count, disable_progress_bar) - - def produce(self, topic: str, value: bytes): - """ - Generic produce that implements kafka-python's send method to push a - byte encoded object into a Kafka topic. - - Args: - topic (str): Kafka topic. - value (bytes): Byte encoded object. - - Returns: - FutureRecordMetadata: resolves to RecordMetadata - - Raises: - KafkaTimeoutError: if unable to fetch topic metadata, or unable - to obtain memory buffer prior to configured max_block_ms - """ - return ( - self.producer.send(topic, value=value) - .add_callback(self._inc_pbar) - .add_errback(self._set_error) - ) - - def flush(self, timeout: Optional[int]): - """ - Generic flush that implements kafka-python's flush method. - - Args: - timeout (Optional[int]): timeout in seconds to wait for completion. - - Returns: - None - - Raises: - KafkaTimeoutError: failure to flush buffered records within the - provided timeout - """ - messages = self.producer.flush(timeout=timeout) - if messages: - raise Exception("Not all Kafka messages are successfully delivered.") - return messages - - -def get_producer( - brokers: str, row_count: int, disable_progress_bar: bool -) -> Union[ConfluentProducer, KafkaPythonProducer]: - """ - Simple context helper function that returns a AbstractProducer object when - invoked. - - This helper function will try to import confluent-kafka as a producer first. - - This helper function will fallback to kafka-python if it fails to import - confluent-kafka. - - Args: - brokers (str): Kafka broker information with hostname and port. - row_count (int): Number of rows in table - - Returns: - Union[ConfluentProducer, KafkaPythonProducer]: - Concrete implementation of a Kafka producer. 
Ig can be: - * confluent-kafka producer - * kafka-python producer - """ - try: - return ConfluentProducer(brokers, row_count, disable_progress_bar) - except ImportError: - print("Unable to import confluent-kafka, falling back to kafka-python") - return KafkaPythonProducer(brokers, row_count, disable_progress_bar) diff --git a/sdk/python/feast/loaders/ingest.py b/sdk/python/feast/loaders/ingest.py deleted file mode 100644 index 820d53317a..0000000000 --- a/sdk/python/feast/loaders/ingest.py +++ /dev/null @@ -1,244 +0,0 @@ -import glob -import os -import tempfile -import time -from math import ceil -from typing import Dict, List, Tuple, Union - -import pandas as pd -import pyarrow as pa -from pyarrow import parquet as pq - -from feast.config import Config -from feast.staging.storage_client import get_staging_client - - -def _check_field_mappings( - column_names: List[str], - feature_table_name: str, - feature_table_timestamp_column: str, - feature_table_field_mappings: Dict[str, str], -) -> None: - """ - Checks that all specified field mappings in FeatureTable can be found in - column names of specified ingestion source. - - Args: - column_names: Column names in provided ingestion source - feature_table_name: Name of FeatureTable - feature_table_timestamp_column: Timestamp column of FeatureTable - feature_table_field_mappings: Field mappings of FeatureTable - """ - - if feature_table_timestamp_column not in column_names: - raise ValueError( - f"Provided data source does not contain timestamp column {feature_table_timestamp_column} in columns {column_names}" - ) - - specified_field_mappings = list() - for k, v in feature_table_field_mappings.items(): - specified_field_mappings.append(v) - - is_valid = all(col_name in column_names for col_name in specified_field_mappings) - - if not is_valid: - raise Exception( - f"Provided data source does not contain all field mappings previously " - f"defined for FeatureTable, {feature_table_name}." - ) - - -def _write_non_partitioned_table_from_source( - column_names: List[str], table: pa.Table, chunk_size: int, max_workers: int -) -> Tuple[str, str]: - """ - Partitions dataset by date based on timestamp_column. - Assumes date_partition_column is in date format if provided. - - Args: - column_names: Column names in provided ingestion source - table: PyArrow table of Dataset - chunk_size: Number of worker processes to use to encode values. - max_workers: Amount of rows to load and ingest at a time. - Returns: - Tuple[str,str]: - Tuple containing parent directory path, destination path to - parquet file. - """ - dir_path = tempfile.mkdtemp() - - # Write table as parquet file with a specified row_group_size - tmp_table_name = f"{int(time.time())}.parquet" - dest_path = f"{dir_path}/{tmp_table_name}" - row_group_size = min(ceil(table.num_rows / max_workers), chunk_size) - pq.write_table(table=table, where=dest_path, row_group_size=row_group_size) - - # Remove table from memory - del table - - return dir_path, dest_path - - -def _write_partitioned_table_from_source( - column_names: List[str], - table: pa.Table, - feature_table_date_partition_column: str, - feature_table_timestamp_column: str, -) -> str: - """ - Partitions dataset by date based on timestamp_column. - Assumes date_partition_column is in date format if provided. 
- - Args: - column_names: Column names in provided ingestion source - table: PyArrow table of Dataset - feature_table_date_partition_column: Date-partition column of FeatureTable - feature_table_timestamp_column: Timestamp column of FeatureTable - Returns: - str: - Root directory which contains date partitioned files. - """ - dir_path = tempfile.mkdtemp() - - # Case: date_partition_column is provided and dataset does not contain it - if feature_table_date_partition_column not in column_names: - df = table.to_pandas() - df[feature_table_date_partition_column] = df[ - feature_table_timestamp_column - ].dt.date - table = pa.Table.from_pandas(df) - - pq.write_to_dataset( - table=table, - root_path=dir_path, - partition_cols=[feature_table_date_partition_column], - ) - - # Remove table from memory - del table - - return dir_path - - -def _read_table_from_source( - source: Union[pd.DataFrame, str] -) -> Tuple[pa.Table, List[str]]: - """ - Infers a data source type (path or Pandas DataFrame) and reads it in as - a PyArrow Table. - - Args: - source (Union[pd.DataFrame, str]): - Either a string path or Pandas DataFrame. - - Returns: - Tuple[pa.Table, List[str]]: - Tuple containing PyArrow table of dataset, and column names of PyArrow table. - """ - - # Pandas DataFrame detected - if isinstance(source, pd.DataFrame): - table = pa.Table.from_pandas(df=source) - - # Inferring a string path - elif isinstance(source, str): - file_path = source - filename, file_ext = os.path.splitext(file_path) - - if ".csv" in file_ext: - from pyarrow import csv - - table = csv.read_csv(filename) - elif ".json" in file_ext: - from pyarrow import json - - table = json.read_json(filename) - else: - table = pq.read_table(file_path) - else: - raise ValueError(f"Unknown data source provided for ingestion: {source}") - - # Ensure that PyArrow table is initialised - assert isinstance(table, pa.lib.Table) - - column_names = table.column_names - - return table, column_names - - -def _upload_to_file_source( - file_url: str, with_partitions: bool, dest_path: str, config: Config -) -> None: - """ - Uploads data into a FileSource. Currently supports GCS, S3 and Local FS. - - Args: - file_url: file url of FileSource defined for FeatureTable - with_partitions: whether to treat dest_path as dir with partitioned table - dest_path: path to file or dir to be uploaded - config: Config instance to configure FileSource - """ - from urllib.parse import urlparse - - uri = urlparse(file_url) - staging_client = get_staging_client(uri.scheme, config) - - if with_partitions: - for path in glob.glob(os.path.join(dest_path, "**/*")): - file_name = path.split("/")[-1] - partition_col = path.split("/")[-2] - with open(path, "rb") as f: - staging_client.upload_fileobj( - f, - path, - remote_uri=uri._replace( - path=str(uri.path).rstrip("/") - + "/" - + partition_col - + "/" - + file_name - ), - ) - else: - file_name = dest_path.split("/")[-1] - with open(dest_path, "rb") as f: - staging_client.upload_fileobj( - f, - dest_path, - remote_uri=uri._replace( - path=str(uri.path).rstrip("/") + "/" + file_name - ), - ) - - -def _upload_to_bq_source( - bq_table_ref: str, feature_table_timestamp_column: str, dest_path: str -) -> None: - """ - Uploads data into a BigQuerySource. 
- - Args: - bq_table_ref: BigQuery table reference of format "project:dataset_name.table_name" defined for FeatureTable - feature_table_timestamp_column: Timestamp column of FeatureTable - dest_path: File path to existing parquet file - """ - from google.cloud import bigquery - - gcp_project, _ = bq_table_ref.split(":") - - bq_client = bigquery.Client(project=gcp_project) - - bq_table_ref = bq_table_ref.replace(":", ".") - table = bigquery.table.Table(bq_table_ref) - - job_config = bigquery.LoadJobConfig() - job_config.source_format = bigquery.SourceFormat.PARQUET - - time_partitioning_obj = bigquery.table.TimePartitioning( - field=feature_table_timestamp_column - ) - job_config.time_partitioning = time_partitioning_obj - with open(dest_path, "rb") as source_file: - bq_client.load_table_from_file( - source_file, table, job_config=job_config - ).result() diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py new file mode 100644 index 0000000000..a807f3b4a4 --- /dev/null +++ b/sdk/python/feast/on_demand_feature_view.py @@ -0,0 +1,589 @@ +import copy +import functools +import warnings +from types import MethodType +from typing import Dict, List, Optional, Type, Union + +import dill +import pandas as pd + +from feast.base_feature_view import BaseFeatureView +from feast.data_source import RequestSource +from feast.errors import RegistryInferenceFailure, SpecifiedFeaturesNotPresentError +from feast.feature import Feature +from feast.feature_view import FeatureView +from feast.feature_view_projection import FeatureViewProjection +from feast.field import Field, from_value_type +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + OnDemandFeatureView as OnDemandFeatureViewProto, +) +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + OnDemandFeatureViewMeta, + OnDemandFeatureViewSpec, + OnDemandSource, +) +from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( + UserDefinedFunction as UserDefinedFunctionProto, +) +from feast.type_map import ( + feast_value_type_to_pandas_type, + python_type_to_feast_value_type, +) +from feast.usage import log_exceptions +from feast.value_type import ValueType + +warnings.simplefilter("once", DeprecationWarning) + + +class OnDemandFeatureView(BaseFeatureView): + """ + [Experimental] An OnDemandFeatureView defines a logical group of features that are + generated by applying a transformation on a set of input sources, such as feature + views and request data sources. + + Attributes: + name: The unique name of the on demand feature view. + features: The list of features in the output of the on demand feature view. + source_feature_view_projections: A map from input source names to actual input + sources with type FeatureViewProjection. + source_request_sources: A map from input source names to the actual input + sources with type RequestSource. + udf: The user defined transformation function, which must take pandas dataframes + as inputs. + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the on demand feature view, typically the email of the primary + maintainer. 
+ """ + + # TODO(adchia): remove inputs from proto and declaration + name: str + features: List[Field] + source_feature_view_projections: Dict[str, FeatureViewProjection] + source_request_sources: Dict[str, RequestSource] + udf: MethodType + description: str + tags: Dict[str, str] + owner: str + + @log_exceptions + def __init__( + self, + *args, + name: Optional[str] = None, + features: Optional[List[Feature]] = None, + sources: Optional[ + Dict[str, Union[FeatureView, FeatureViewProjection, RequestSource]] + ] = None, + udf: Optional[MethodType] = None, + inputs: Optional[ + Dict[str, Union[FeatureView, FeatureViewProjection, RequestSource]] + ] = None, + schema: Optional[List[Field]] = None, + description: str = "", + tags: Optional[Dict[str, str]] = None, + owner: str = "", + ): + """ + Creates an OnDemandFeatureView object. + + Args: + name: The unique name of the on demand feature view. + features (deprecated): The list of features in the output of the on demand + feature view, after the transformation has been applied. + sources (optional): A map from input source names to the actual input sources, + which may be feature views, feature view projections, or request data sources. + These sources serve as inputs to the udf, which will refer to them by name. + udf (optional): The user defined transformation function, which must take pandas + dataframes as inputs. + inputs (optional): A map from input source names to the actual input sources, + which may be feature views, feature view projections, or request data sources. + These sources serve as inputs to the udf, which will refer to them by name. + schema (optional): The list of features in the output of the on demand feature + view, after the transformation has been applied. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the on demand feature view, typically the email + of the primary maintainer. + """ + positional_attributes = ["name", "features", "inputs", "udf"] + + _name = name + + _schema = schema or [] + if len(_schema) == 0 and features is not None: + _schema = [Field.from_feature(feature) for feature in features] + if features is not None: + warnings.warn( + ( + "The `features` parameter is being deprecated in favor of the `schema` parameter. " + "Please switch from using `features` to `schema`. This will also requiring switching " + "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "support the `features` parameter." + ), + DeprecationWarning, + ) + + _sources = sources or inputs + if inputs and sources: + raise ValueError("At most one of `sources` or `inputs` can be specified.") + elif inputs: + warnings.warn( + ( + "The `inputs` parameter is being deprecated. Please use `sources` instead. " + "Feast 0.21 and onwards will not support the `inputs` parameter." + ), + DeprecationWarning, + ) + + _udf = udf + + if args: + warnings.warn( + ( + "On demand feature view parameters should be specified as keyword arguments " + "instead of positional arguments. Feast 0.23 and onwards will not support " + "positional arguments in on demand feature view definitions." + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args " + f"when defining feature views, for backwards compatibility." 
+ )
 + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _schema = args[1] + # Convert Features to Fields. + if len(_schema) > 0 and isinstance(_schema[0], Feature): + _schema = [Field.from_feature(feature) for feature in _schema] + warnings.warn( + ( + "The `features` parameter is being deprecated in favor of the `schema` parameter. " + "Please switch from using `features` to `schema`. This will also require switching " + "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "support the `features` parameter." + ), + DeprecationWarning, + ) + if len(args) >= 3: + _sources = args[2] + warnings.warn( + ( + "The `inputs` parameter is being deprecated. Please use `sources` instead. " + "Feast 0.21 and onwards will not support the `inputs` parameter." + ), + DeprecationWarning, + ) + if len(args) >= 4: + _udf = args[3] + + if not _name: + raise ValueError( + "The name of the on demand feature view must be specified." + ) + + if not _sources: + raise ValueError("The `sources` parameter must be specified.") + + super().__init__( + name=_name, + features=_schema, + description=description, + tags=tags, + owner=owner, + ) + + assert _sources is not None + self.source_feature_view_projections: Dict[str, FeatureViewProjection] = {} + self.source_request_sources: Dict[str, RequestSource] = {} + for source_name, odfv_source in _sources.items(): + if isinstance(odfv_source, RequestSource): + self.source_request_sources[source_name] = odfv_source + elif isinstance(odfv_source, FeatureViewProjection): + self.source_feature_view_projections[source_name] = odfv_source + else: + self.source_feature_view_projections[ + source_name + ] = odfv_source.projection + + if _udf is None: + raise ValueError("The `udf` parameter must be specified.") + assert _udf + self.udf = _udf + + @property + def proto_class(self) -> Type[OnDemandFeatureViewProto]: + return OnDemandFeatureViewProto + + def __copy__(self): + fv = OnDemandFeatureView( + name=self.name, + schema=self.features, + sources=dict( + **self.source_feature_view_projections, **self.source_request_sources, + ), + udf=self.udf, + description=self.description, + tags=self.tags, + owner=self.owner, + ) + fv.projection = copy.copy(self.projection) + return fv + + def __eq__(self, other): + if not isinstance(other, OnDemandFeatureView): + raise TypeError( + "Comparisons should only involve OnDemandFeatureView class objects." + ) + + if not super().__eq__(other): + return False + + if ( + self.source_feature_view_projections + != other.source_feature_view_projections + or self.source_request_sources != other.source_request_sources + or self.udf.__code__.co_code != other.udf.__code__.co_code + ): + return False + + return True + + def __hash__(self): + return super().__hash__() + + def to_proto(self) -> OnDemandFeatureViewProto: + """ + Converts an on demand feature view object to its protobuf representation. + + Returns: + A OnDemandFeatureViewProto protobuf. 
+ """ + meta = OnDemandFeatureViewMeta() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + sources = {} + for source_name, fv_projection in self.source_feature_view_projections.items(): + sources[source_name] = OnDemandSource( + feature_view_projection=fv_projection.to_proto() + ) + for (source_name, request_sources,) in self.source_request_sources.items(): + sources[source_name] = OnDemandSource( + request_data_source=request_sources.to_proto() + ) + + spec = OnDemandFeatureViewSpec( + name=self.name, + features=[feature.to_proto() for feature in self.features], + sources=sources, + user_defined_function=UserDefinedFunctionProto( + name=self.udf.__name__, body=dill.dumps(self.udf, recurse=True), + ), + description=self.description, + tags=self.tags, + owner=self.owner, + ) + + return OnDemandFeatureViewProto(spec=spec, meta=meta) + + @classmethod + def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): + """ + Creates an on demand feature view from a protobuf representation. + + Args: + on_demand_feature_view_proto: A protobuf representation of an on-demand feature view. + + Returns: + A OnDemandFeatureView object based on the on-demand feature view protobuf. + """ + sources = {} + for ( + source_name, + on_demand_source, + ) in on_demand_feature_view_proto.spec.sources.items(): + if on_demand_source.WhichOneof("source") == "feature_view": + sources[source_name] = FeatureView.from_proto( + on_demand_source.feature_view + ).projection + elif on_demand_source.WhichOneof("source") == "feature_view_projection": + sources[source_name] = FeatureViewProjection.from_proto( + on_demand_source.feature_view_projection + ) + else: + sources[source_name] = RequestSource.from_proto( + on_demand_source.request_data_source + ) + on_demand_feature_view_obj = cls( + name=on_demand_feature_view_proto.spec.name, + schema=[ + Field( + name=feature.name, + dtype=from_value_type(ValueType(feature.value_type)), + ) + for feature in on_demand_feature_view_proto.spec.features + ], + sources=sources, + udf=dill.loads( + on_demand_feature_view_proto.spec.user_defined_function.body + ), + description=on_demand_feature_view_proto.spec.description, + tags=dict(on_demand_feature_view_proto.spec.tags), + owner=on_demand_feature_view_proto.spec.owner, + ) + + # FeatureViewProjections are not saved in the OnDemandFeatureView proto. + # Create the default projection. 
+        on_demand_feature_view_obj.projection = FeatureViewProjection.from_definition(
+            on_demand_feature_view_obj
+        )
+
+        if on_demand_feature_view_proto.meta.HasField("created_timestamp"):
+            on_demand_feature_view_obj.created_timestamp = (
+                on_demand_feature_view_proto.meta.created_timestamp.ToDatetime()
+            )
+        if on_demand_feature_view_proto.meta.HasField("last_updated_timestamp"):
+            on_demand_feature_view_obj.last_updated_timestamp = (
+                on_demand_feature_view_proto.meta.last_updated_timestamp.ToDatetime()
+            )
+
+        return on_demand_feature_view_obj
+
+    def get_request_data_schema(self) -> Dict[str, ValueType]:
+        schema: Dict[str, ValueType] = {}
+        for request_source in self.source_request_sources.values():
+            if isinstance(request_source.schema, List):
+                new_schema = {}
+                for field in request_source.schema:
+                    new_schema[field.name] = field.dtype.to_value_type()
+                schema.update(new_schema)
+            elif isinstance(request_source.schema, Dict):
+                schema.update(request_source.schema)
+            else:
+                raise Exception(
+                    f"Request source schema is not correct type: {str(type(request_source.schema))}"
+                )
+        return schema
+
+    def get_transformed_features_df(
+        self, df_with_features: pd.DataFrame, full_feature_names: bool = False,
+    ) -> pd.DataFrame:
+        # Apply on demand transformations
+        columns_to_cleanup = []
+        for source_fv_projection in self.source_feature_view_projections.values():
+            for feature in source_fv_projection.features:
+                full_feature_ref = f"{source_fv_projection.name}__{feature.name}"
+                if full_feature_ref in df_with_features.keys():
+                    # Make sure the partial feature name is always present
+                    df_with_features[feature.name] = df_with_features[full_feature_ref]
+                    columns_to_cleanup.append(feature.name)
+                elif feature.name in df_with_features.keys():
+                    # Make sure the full feature name is always present
+                    df_with_features[full_feature_ref] = df_with_features[feature.name]
+                    columns_to_cleanup.append(full_feature_ref)
+
+        # Compute transformed values and apply to each result row
+        df_with_transformed_features = self.udf.__call__(df_with_features)
+
+        # Work out whether the correct column names are used.
+        rename_columns: Dict[str, str] = {}
+        for feature in self.features:
+            short_name = feature.name
+            long_name = f"{self.projection.name_to_use()}__{feature.name}"
+            if (
+                short_name in df_with_transformed_features.columns
+                and full_feature_names
+            ):
+                rename_columns[short_name] = long_name
+            elif not full_feature_names:
+                # Long name must be in dataframe.
+                rename_columns[long_name] = short_name
+
+        # Cleanup extra columns used for transformation
+        df_with_features.drop(columns=columns_to_cleanup, inplace=True)
+        return df_with_transformed_features.rename(columns=rename_columns)
+
+    def infer_features(self):
+        """
+        Infers the set of features associated with this feature view from the input source.
+
+        Raises:
+            RegistryInferenceFailure: The set of features could not be inferred.
+ """ + df = pd.DataFrame() + for feature_view_projection in self.source_feature_view_projections.values(): + for feature in feature_view_projection.features: + dtype = feast_value_type_to_pandas_type(feature.dtype.to_value_type()) + df[f"{feature_view_projection.name}__{feature.name}"] = pd.Series( + dtype=dtype + ) + df[f"{feature.name}"] = pd.Series(dtype=dtype) + for request_data in self.source_request_sources.values(): + for field in request_data.schema: + dtype = feast_value_type_to_pandas_type(field.dtype.to_value_type()) + df[f"{field.name}"] = pd.Series(dtype=dtype) + output_df: pd.DataFrame = self.udf.__call__(df) + inferred_features = [] + for f, dt in zip(output_df.columns, output_df.dtypes): + inferred_features.append( + Field( + name=f, + dtype=from_value_type( + python_type_to_feast_value_type(f, type_name=str(dt)) + ), + ) + ) + + if self.features: + missing_features = [] + for specified_features in self.features: + if specified_features not in inferred_features: + missing_features.append(specified_features) + if missing_features: + raise SpecifiedFeaturesNotPresentError( + [f.name for f in missing_features], self.name + ) + else: + self.features = inferred_features + + if not self.features: + raise RegistryInferenceFailure( + "OnDemandFeatureView", + f"Could not infer Features for the feature view '{self.name}'.", + ) + + @staticmethod + def get_requested_odfvs(feature_refs, project, registry): + all_on_demand_feature_views = registry.list_on_demand_feature_views( + project, allow_cache=True + ) + requested_on_demand_feature_views: List[OnDemandFeatureView] = [] + for odfv in all_on_demand_feature_views: + for feature in odfv.features: + if f"{odfv.name}:{feature.name}" in feature_refs: + requested_on_demand_feature_views.append(odfv) + break + return requested_on_demand_feature_views + + +# TODO(felixwang9817): Force this decorator to accept kwargs and switch from +# `features` to `schema`. +def on_demand_feature_view( + *args, + features: Optional[List[Feature]] = None, + sources: Optional[Dict[str, Union[FeatureView, RequestSource]]] = None, + inputs: Optional[Dict[str, Union[FeatureView, RequestSource]]] = None, + schema: Optional[List[Field]] = None, + description: str = "", + tags: Optional[Dict[str, str]] = None, + owner: str = "", +): + """ + Creates an OnDemandFeatureView object with the given user function as udf. + + Args: + features (deprecated): The list of features in the output of the on demand + feature view, after the transformation has been applied. + sources (optional): A map from input source names to the actual input sources, + which may be feature views, feature view projections, or request data sources. + These sources serve as inputs to the udf, which will refer to them by name. + inputs (optional): A map from input source names to the actual input sources, + which may be feature views, feature view projections, or request data sources. + These sources serve as inputs to the udf, which will refer to them by name. + schema (optional): The list of features in the output of the on demand feature + view, after the transformation has been applied. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the on demand feature view, typically the email + of the primary maintainer. 
+ """ + positional_attributes = ["features", "inputs"] + + _schema = schema or [] + if len(_schema) == 0 and features is not None: + _schema = [Field.from_feature(feature) for feature in features] + if features is not None: + warnings.warn( + ( + "The `features` parameter is being deprecated in favor of the `schema` parameter. " + "Please switch from using `features` to `schema`. This will also requiring switching " + "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "support the `features` parameter." + ), + DeprecationWarning, + ) + + _sources = sources or inputs + if inputs and sources: + raise ValueError("At most one of `sources` or `inputs` can be specified.") + elif inputs: + warnings.warn( + ( + "The `inputs` parameter is being deprecated. Please use `sources` instead. " + "Feast 0.21 and onwards will not support the `inputs` parameter." + ), + DeprecationWarning, + ) + + if args: + warnings.warn( + ( + "On demand feature view parameters should be specified as keyword arguments " + "instead of positional arguments. Feast 0.23 and onwards will not support " + "positional arguments in on demand feature view definitions." + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args " + f"when defining feature views, for backwards compatibility." + ) + if len(args) >= 1: + _schema = args[0] + # Convert Features to Fields. + if len(_schema) > 0 and isinstance(_schema[0], Feature): + _schema = [Field.from_feature(feature) for feature in _schema] + warnings.warn( + ( + "The `features` parameter is being deprecated in favor of the `schema` parameter. " + "Please switch from using `features` to `schema`. This will also requiring switching " + "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "support the `features` parameter." + ), + DeprecationWarning, + ) + if len(args) >= 2: + _sources = args[1] + warnings.warn( + ( + "The `inputs` parameter is being deprecated. Please use `sources` instead. " + "Feast 0.21 and onwards will not support the `inputs` parameter." + ), + DeprecationWarning, + ) + + if not _sources: + raise ValueError("The `sources` parameter must be specified.") + + def decorator(user_function): + on_demand_feature_view_obj = OnDemandFeatureView( + name=user_function.__name__, + sources=_sources, + schema=_schema, + udf=user_function, + description=description, + tags=tags, + owner=owner, + ) + functools.update_wrapper( + wrapper=on_demand_feature_view_obj, wrapped=user_function + ) + return on_demand_feature_view_obj + + return decorator diff --git a/sdk/python/feast/online_response.py b/sdk/python/feast/online_response.py index ac936a41a3..48524359bf 100644 --- a/sdk/python/feast/online_response.py +++ b/sdk/python/feast/online_response.py @@ -12,25 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, List, cast
+from typing import Any, Dict, List

 import pandas as pd

-from feast.protos.feast.serving.ServingService_pb2 import (
-    GetOnlineFeaturesRequestV2,
-    GetOnlineFeaturesResponse,
-)
-from feast.protos.feast.types.Value_pb2 import Value as Value
-from feast.type_map import (
-    _python_value_to_proto_value,
-    feast_value_type_to_python_type,
-    python_type_to_feast_value_type,
-)
+from feast.feature_view import DUMMY_ENTITY_ID
+from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesResponse
+from feast.type_map import feast_value_type_to_python_type
+
+TIMESTAMP_POSTFIX: str = "__ts"


 class OnlineResponse:
     """
-    Defines a online response in feast.
+    Defines an online response in feast.
     """

     def __init__(self, online_response_proto: GetOnlineFeaturesResponse):
@@ -41,70 +36,44 @@ def __init__(self, online_response_proto: GetOnlineFeaturesResponse):
             online_response_proto: GetOnlineResponse proto object to construct from.
         """
         self.proto = online_response_proto
+        # Delete DUMMY_ENTITY_ID from proto if it exists
+        for idx, val in enumerate(self.proto.metadata.feature_names.val):
+            if val == DUMMY_ENTITY_ID:
+                del self.proto.metadata.feature_names.val[idx]
+                del self.proto.results[idx]

-    @property
-    def field_values(self):
-        """
-        Getter for GetOnlineResponse's field_values.
-        """
-        return self.proto.field_values
+                break

-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self, include_event_timestamps: bool = False) -> Dict[str, Any]:
         """
         Converts GetOnlineFeaturesResponse features into a dictionary form.
+
+        Args:
+            include_event_timestamps: Optionally include feature event timestamps in the dictionary
         """
-        fields = [k for row in self.field_values for k, _ in row.statuses.items()]
-        features_dict: Dict[str, List[Any]] = {k: list() for k in fields}
+        response: Dict[str, List[Any]] = {}
+
+        for feature_ref, feature_vector in zip(
+            self.proto.metadata.feature_names.val, self.proto.results
+        ):
+            response[feature_ref] = [
+                feast_value_type_to_python_type(v) for v in feature_vector.values
+            ]

-        for row in self.field_values:
-            for feature in features_dict.keys():
-                native_type_value = feast_value_type_to_python_type(row.fields[feature])
-                features_dict[feature].append(native_type_value)
+            if include_event_timestamps:
+                timestamp_ref = feature_ref + TIMESTAMP_POSTFIX
+                response[timestamp_ref] = [
+                    ts.seconds for ts in feature_vector.event_timestamps
+                ]

-        return features_dict
+        return response

-    def to_df(self) -> pd.DataFrame:
+    def to_df(self, include_event_timestamps: bool = False) -> pd.DataFrame:
         """
         Converts GetOnlineFeaturesResponse features into Panda dataframe form.
-        """
-        return pd.DataFrame(self.to_dict())
-
-
-def _infer_online_entity_rows(
-    entity_rows: List[Dict[str, Any]]
-) -> List[GetOnlineFeaturesRequestV2.EntityRow]:
-    """
-    Builds a list of EntityRow protos from Python native type format passed by user.
-
-    Args:
-        entity_rows: A list of dictionaries where each key-value is an entity-name, entity-value pair.
-    Returns:
-        A list of EntityRow protos parsed from args.
- """ + Args: + is_with_event_timestamps: bool Optionally include feature timestamps in the dataframe + """ - entity_rows_dicts = cast(List[Dict[str, Any]], entity_rows) - entity_row_list = [] - entity_type_map = dict() - - for entity in entity_rows_dicts: - fields = {} - for key, value in entity.items(): - # Allow for feast.types.Value - if isinstance(value, Value): - proto_value = value - else: - # Infer the specific type for this row - current_dtype = python_type_to_feast_value_type(name=key, value=value) - - if key not in entity_type_map: - entity_type_map[key] = current_dtype - else: - if current_dtype != entity_type_map[key]: - raise TypeError( - f"Input entity {key} has mixed types, {current_dtype} and {entity_type_map[key]}. That is not allowed. " - ) - proto_value = _python_value_to_proto_value(current_dtype, value) - fields[key] = proto_value - entity_row_list.append(GetOnlineFeaturesRequestV2.EntityRow(fields=fields)) - return entity_row_list + return pd.DataFrame(self.to_dict(include_event_timestamps)) diff --git a/sdk/python/feast/proto_json.py b/sdk/python/feast/proto_json.py new file mode 100644 index 0000000000..44e004cb03 --- /dev/null +++ b/sdk/python/feast/proto_json.py @@ -0,0 +1,198 @@ +import uuid +from typing import Any, Callable, Type + +from google.protobuf.json_format import ( # type: ignore + _WKTJSONMETHODS, + ParseError, + _Parser, + _Printer, +) + +from feast.protos.feast.serving.ServingService_pb2 import FeatureList +from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value + +ProtoMessage = Any +JsonObject = Any + + +# TODO: These methods need to be updated when bumping the version of protobuf. +# https://github.com/feast-dev/feast/issues/2484 +def _patch_proto_json_encoding( + proto_type: Type[ProtoMessage], + to_json_object: Callable[[_Printer, ProtoMessage], JsonObject], + from_json_object: Callable[[_Parser, JsonObject, ProtoMessage], None], +) -> None: + """Patch Protobuf JSON Encoder / Decoder for a desired Protobuf type with to_json & from_json methods.""" + to_json_fn_name = "_" + uuid.uuid4().hex + from_json_fn_name = "_" + uuid.uuid4().hex + setattr(_Printer, to_json_fn_name, to_json_object) + setattr(_Parser, from_json_fn_name, from_json_object) + _WKTJSONMETHODS[proto_type.DESCRIPTOR.full_name] = [ + to_json_fn_name, + from_json_fn_name, + ] + + +def _patch_feast_value_json_encoding(): + """Patch Protobuf JSON Encoder / Decoder with a Feast Value type. + + This allows encoding the proto object as a native type, without the dummy structural wrapper. + + Here's a before example: + + { + "value_1": { + "int64_val": 1 + }, + "value_2": { + "double_list_val": [1.0, 2.0, 3.0] + }, + } + + And here's an after example: + + { + "value_1": 1, + "value_2": [1.0, 2.0, 3.0] + } + """ + + def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: + which = message.WhichOneof("val") + # If the Value message is not set treat as null_value when serialize + # to JSON. The parse back result will be different from original message. 
+ if which is None or which == "null_val": + return None + elif "_list_" in which: + value = list(getattr(message, which).val) + else: + value = getattr(message, which) + return value + + def from_json_object( + parser: _Parser, value: JsonObject, message: ProtoMessage, + ) -> None: + if value is None: + message.null_val = 0 + elif isinstance(value, bool): + message.bool_val = value + elif isinstance(value, str): + message.string_val = value + elif isinstance(value, int): + message.int64_val = value + elif isinstance(value, float): + message.double_val = value + elif isinstance(value, list): + if len(value) == 0: + # Clear will mark the struct as modified so it will be created even if there are no values + message.int64_list_val.Clear() + elif isinstance(value[0], bool): + message.bool_list_val.val.extend(value) + elif isinstance(value[0], str): + message.string_list_val.val.extend(value) + elif isinstance(value[0], (float, int, type(None))): + # Identify array as ints if all of the elements are ints + if all(isinstance(item, int) for item in value): + message.int64_list_val.val.extend(value) + # If any of the elements are floats or nulls, then parse it as a float array + else: + # Convert each null as NaN. + message.double_list_val.val.extend( + [item if item is not None else float("nan") for item in value] + ) + else: + raise ParseError( + "Value {0} has unexpected type {1}.".format( + value[0], type(value[0]) + ) + ) + else: + raise ParseError( + "Value {0} has unexpected type {1}.".format(value, type(value)) + ) + + _patch_proto_json_encoding(Value, to_json_object, from_json_object) + + +def _patch_feast_repeated_value_json_encoding(): + """Patch Protobuf JSON Encoder / Decoder with a Feast RepeatedValue type. + + This allows list of lists without dummy field name "val". + + Here's a before example: + + { + "repeated_value": [ + {"val": [1,2,3]}, + {"val": [4,5,6]} + ] + } + + And here's an after example: + + { + "repeated_value": [ + [1,2,3], + [4,5,6] + ] + } + """ + + def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: + return [printer._MessageToJsonObject(item) for item in message.val] + + def from_json_object( + parser: _Parser, value: JsonObject, message: ProtoMessage, + ) -> None: + array = value if isinstance(value, list) else value["val"] + for item in array: + parser.ConvertMessage(item, message.val.add()) + + _patch_proto_json_encoding(RepeatedValue, to_json_object, from_json_object) + + +def _patch_feast_feature_list_json_encoding(): + """Patch Protobuf JSON Encoder / Decoder with a Feast FeatureList type. + + This allows list of lists without dummy field name "features". 
+ + Here's a before example: + + { + "feature_list": { + "features": [ + "feature-1", + "feature-2", + "feature-3" + ] + } + } + + And here's an after example: + + { + "feature_list": [ + "feature-1", + "feature-2", + "feature-3" + ] + } + """ + + def to_json_object(printer: _Printer, message: ProtoMessage) -> JsonObject: + return list(message.val) + + def from_json_object( + parser: _Parser, value: JsonObject, message: ProtoMessage, + ) -> None: + array = value if isinstance(value, list) else value["val"] + message.val.extend(array) + + _patch_proto_json_encoding(FeatureList, to_json_object, from_json_object) + + +def patch(): + """Patch Protobuf JSON Encoder / Decoder with all desired Feast types.""" + _patch_feast_value_json_encoding() + _patch_feast_repeated_value_json_encoding() + _patch_feast_feature_list_json_encoding() diff --git a/sdk/python/feast/py.typed b/sdk/python/feast/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index d68972466a..5f5d27318a 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -11,31 +11,132 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -import os -import uuid -from abc import ABC, abstractmethod +import json +import logging +from collections import defaultdict from datetime import datetime, timedelta +from enum import Enum from pathlib import Path -from tempfile import TemporaryFile -from typing import List, Optional +from threading import Lock +from typing import Any, Dict, List, Optional from urllib.parse import urlparse +import dill +from google.protobuf.internal.containers import RepeatedCompositeFieldContainer +from google.protobuf.json_format import MessageToJson +from proto import Message + +from feast.base_feature_view import BaseFeatureView +from feast.data_source import DataSource from feast.entity import Entity from feast.errors import ( + ConflictingFeatureViewNames, + DataSourceNotFoundException, + DataSourceObjectNotFoundException, EntityNotFoundException, - FeatureTableNotFoundException, + FeatureServiceNotFoundException, FeatureViewNotFoundException, - S3RegistryBucketForbiddenAccess, - S3RegistryBucketNotExist, + OnDemandFeatureViewNotFoundException, + SavedDatasetNotFound, ) -from feast.feature_table import FeatureTable +from feast.feature_service import FeatureService from feast.feature_view import FeatureView +from feast.importer import import_class +from feast.infra.infra_object import Infra +from feast.on_demand_feature_view import OnDemandFeatureView from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry_store import NoopRegistryStore +from feast.repo_config import RegistryConfig +from feast.repo_contents import RepoContents +from feast.request_feature_view import RequestFeatureView +from feast.saved_dataset import SavedDataset REGISTRY_SCHEMA_VERSION = "1" +REGISTRY_STORE_CLASS_FOR_TYPE = { + "GCSRegistryStore": "feast.infra.gcp.GCSRegistryStore", + "S3RegistryStore": "feast.infra.aws.S3RegistryStore", + "LocalRegistryStore": "feast.infra.local.LocalRegistryStore", +} + +REGISTRY_STORE_CLASS_FOR_SCHEME = { + "gs": "GCSRegistryStore", + "s3": "S3RegistryStore", + "file": "LocalRegistryStore", + "": "LocalRegistryStore", +} + + +class FeastObjectType(Enum): + DATA_SOURCE = "data source" + ENTITY = "entity" + FEATURE_VIEW = "feature view" + 
ON_DEMAND_FEATURE_VIEW = "on demand feature view" + REQUEST_FEATURE_VIEW = "request feature view" + FEATURE_SERVICE = "feature service" + + @staticmethod + def get_objects_from_registry( + registry: "Registry", project: str + ) -> Dict["FeastObjectType", List[Any]]: + return { + FeastObjectType.DATA_SOURCE: registry.list_data_sources(project=project), + FeastObjectType.ENTITY: registry.list_entities(project=project), + FeastObjectType.FEATURE_VIEW: registry.list_feature_views(project=project), + FeastObjectType.ON_DEMAND_FEATURE_VIEW: registry.list_on_demand_feature_views( + project=project + ), + FeastObjectType.REQUEST_FEATURE_VIEW: registry.list_request_feature_views( + project=project + ), + FeastObjectType.FEATURE_SERVICE: registry.list_feature_services( + project=project + ), + } + + @staticmethod + def get_objects_from_repo_contents( + repo_contents: RepoContents, + ) -> Dict["FeastObjectType", List[Any]]: + return { + FeastObjectType.DATA_SOURCE: repo_contents.data_sources, + FeastObjectType.ENTITY: repo_contents.entities, + FeastObjectType.FEATURE_VIEW: repo_contents.feature_views, + FeastObjectType.ON_DEMAND_FEATURE_VIEW: repo_contents.on_demand_feature_views, + FeastObjectType.REQUEST_FEATURE_VIEW: repo_contents.request_feature_views, + FeastObjectType.FEATURE_SERVICE: repo_contents.feature_services, + } + + +FEAST_OBJECT_TYPES = [feast_object_type for feast_object_type in FeastObjectType] + + +logger = logging.getLogger(__name__) + + +def get_registry_store_class_from_type(registry_store_type: str): + if not registry_store_type.endswith("RegistryStore"): + raise Exception('Registry store class name should end with "RegistryStore"') + if registry_store_type in REGISTRY_STORE_CLASS_FOR_TYPE: + registry_store_type = REGISTRY_STORE_CLASS_FOR_TYPE[registry_store_type] + module_name, registry_store_class_name = registry_store_type.rsplit(".", 1) + + return import_class(module_name, registry_store_class_name, "RegistryStore") + + +def get_registry_store_class_from_scheme(registry_path: str): + uri = urlparse(registry_path) + if uri.scheme not in REGISTRY_STORE_CLASS_FOR_SCHEME: + raise Exception( + f"Registry path {registry_path} has unsupported scheme {uri.scheme}. " + f"Supported schemes are file, s3 and gs." + ) + else: + registry_store_type = REGISTRY_STORE_CLASS_FOR_SCHEME[uri.scheme] + return get_registry_store_class_from_type(registry_store_type) + + class Registry: """ Registry: A registry allows for the management and persistence of feature definitions and related metadata. @@ -47,39 +148,86 @@ class Registry: cached_registry_proto: Optional[RegistryProto] = None cached_registry_proto_created: Optional[datetime] = None cached_registry_proto_ttl: timedelta - cache_being_updated: bool = False - def __init__(self, registry_path: str, repo_path: Path, cache_ttl: timedelta): + def __init__( + self, registry_config: Optional[RegistryConfig], repo_path: Optional[Path] + ): """ Create the Registry object. Args: + registry_config: RegistryConfig object containing the destination path and cache ttl, repo_path: Path to the base of the Feast repository - cache_ttl: The amount of time that cached registry state stays valid - registry_path: filepath or GCS URI that is the location of the object store registry, or where it will be created if it does not exist yet. 
""" - uri = urlparse(registry_path) - if uri.scheme == "gs": - self._registry_store: RegistryStore = GCSRegistryStore(registry_path) - elif uri.scheme == "s3": - self._registry_store = S3RegistryStore(registry_path) - elif uri.scheme == "file" or uri.scheme == "": - self._registry_store = LocalRegistryStore( - repo_path=repo_path, registry_path_string=registry_path - ) - else: - raise Exception( - f"Registry path {registry_path} has unsupported scheme {uri.scheme}. Supported schemes are file and gs." + + self._refresh_lock = Lock() + + if registry_config: + registry_store_type = registry_config.registry_store_type + registry_path = registry_config.path + if registry_store_type is None: + cls = get_registry_store_class_from_scheme(registry_path) + else: + cls = get_registry_store_class_from_type(str(registry_store_type)) + + self._registry_store = cls(registry_config, repo_path) + self.cached_registry_proto_ttl = timedelta( + seconds=registry_config.cache_ttl_seconds + if registry_config.cache_ttl_seconds is not None + else 0 ) - self.cached_registry_proto_ttl = cache_ttl - return + + def clone(self) -> "Registry": + new_registry = Registry(None, None) + new_registry.cached_registry_proto_ttl = timedelta(seconds=0) + new_registry.cached_registry_proto = ( + self.cached_registry_proto.__deepcopy__() + if self.cached_registry_proto + else RegistryProto() + ) + new_registry.cached_registry_proto_created = datetime.utcnow() + new_registry._registry_store = NoopRegistryStore() + return new_registry def _initialize_registry(self): - """Explicitly initializes the registry with an empty proto.""" - registry_proto = RegistryProto() - registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION - self._registry_store.update_registry_proto(registry_proto) + """Explicitly initializes the registry with an empty proto if it doesn't exist.""" + try: + self._get_registry_proto() + except FileNotFoundError: + registry_proto = RegistryProto() + registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + self._registry_store.update_registry_proto(registry_proto) + + def update_infra(self, infra: Infra, project: str, commit: bool = True): + """ + Updates the stored Infra object. + + Args: + infra: The new Infra object to be stored. + project: Feast project that the Infra object refers to + commit: Whether the change should be persisted immediately + """ + self._prepare_registry_for_changes() + assert self.cached_registry_proto + + self.cached_registry_proto.infra.CopyFrom(infra.to_proto()) + if commit: + self.commit() + + def get_infra(self, project: str, allow_cache: bool = False) -> Infra: + """ + Retrieves the stored Infra object. + + Args: + project: Feast project that the Infra object refers to + allow_cache: Whether to allow returning this entity from a cached registry + + Returns: + The stored Infra object. 
+ """ + registry_proto = self._get_registry_proto(allow_cache=allow_cache) + return Infra.from_proto(registry_proto.infra) def apply_entity(self, entity: Entity, project: str, commit: bool = True): """ @@ -91,6 +239,12 @@ def apply_entity(self, entity: Entity, project: str, commit: bool = True): commit: Whether the change should be persisted immediately """ entity.is_valid() + + now = datetime.utcnow() + if not entity.created_timestamp: + entity.created_timestamp = now + entity.last_updated_timestamp = now + entity_proto = entity.to_proto() entity_proto.spec.project = project self._prepare_registry_for_changes() @@ -109,7 +263,6 @@ def apply_entity(self, entity: Entity, project: str, commit: bool = True): self.cached_registry_proto.entities.append(entity_proto) if commit: self.commit() - return def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]: """ @@ -129,57 +282,175 @@ def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity] entities.append(Entity.from_proto(entity_proto)) return entities - def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: + def list_data_sources( + self, project: str, allow_cache: bool = False + ) -> List[DataSource]: """ - Retrieves an entity. + Retrieve a list of data sources from the registry Args: - name: Name of entity - project: Feast project that this entity belongs to + project: Filter data source based on project name + allow_cache: Whether to allow returning data sources from a cached registry Returns: - Returns either the specified entity, or raises an exception if - none is found + List of data sources """ registry_proto = self._get_registry_proto(allow_cache=allow_cache) - for entity_proto in registry_proto.entities: - if entity_proto.spec.name == name and entity_proto.spec.project == project: - return Entity.from_proto(entity_proto) - raise EntityNotFoundException(name, project=project) - - def apply_feature_table( - self, feature_table: FeatureTable, project: str, commit: bool = True + data_sources = [] + for data_source_proto in registry_proto.data_sources: + if data_source_proto.project == project: + data_sources.append(DataSource.from_proto(data_source_proto)) + return data_sources + + def apply_data_source( + self, data_source: DataSource, project: str, commit: bool = True ): """ - Registers a single feature table with Feast + Registers a single data source with Feast + + Args: + data_source: A data source that will be registered + project: Feast project that this data source belongs to + commit: Whether to immediately commit to the registry + """ + registry = self._prepare_registry_for_changes() + for idx, existing_data_source_proto in enumerate(registry.data_sources): + if existing_data_source_proto.name == data_source.name: + del registry.data_sources[idx] + data_source_proto = data_source.to_proto() + data_source_proto.data_source_class_type = ( + f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" + ) + data_source_proto.project = project + data_source_proto.data_source_class_type = ( + f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" + ) + registry.data_sources.append(data_source_proto) + if commit: + self.commit() + + def delete_data_source(self, name: str, project: str, commit: bool = True): + """ + Deletes a data source or raises an exception if not found. 
         Args:
-            feature_table: Feature table that will be registered
-            project: Feast project that this feature table belongs to
+            name: Name of data source
+            project: Feast project that this data source belongs to
             commit: Whether the change should be persisted immediately
         """
-        feature_table.is_valid()
-        feature_table_proto = feature_table.to_proto()
-        feature_table_proto.spec.project = project
         self._prepare_registry_for_changes()
         assert self.cached_registry_proto

-        for idx, existing_feature_table_proto in enumerate(
-            self.cached_registry_proto.feature_tables
+        for idx, data_source_proto in enumerate(
+            self.cached_registry_proto.data_sources
         ):
+            if data_source_proto.name == name:
+                del self.cached_registry_proto.data_sources[idx]
+                if commit:
+                    self.commit()
+                return
+        raise DataSourceNotFoundException(name)
+
+    def apply_feature_service(
+        self, feature_service: FeatureService, project: str, commit: bool = True
+    ):
+        """
+        Registers a single feature service with Feast
+
+        Args:
+            feature_service: A feature service that will be registered
+            project: Feast project that this feature service belongs to
+            commit: Whether the change should be persisted immediately
+        """
+        now = datetime.utcnow()
+        if not feature_service.created_timestamp:
+            feature_service.created_timestamp = now
+        feature_service.last_updated_timestamp = now
+
+        feature_service_proto = feature_service.to_proto()
+        feature_service_proto.spec.project = project
+
+        registry = self._prepare_registry_for_changes()
+
+        for idx, existing_feature_service_proto in enumerate(registry.feature_services):
+            if (
+                existing_feature_service_proto.spec.name
+                == feature_service_proto.spec.name
+                and existing_feature_service_proto.spec.project == project
+            ):
+                del registry.feature_services[idx]
+        registry.feature_services.append(feature_service_proto)
         if commit:
             self.commit()

+    def list_feature_services(
+        self, project: str, allow_cache: bool = False
+    ) -> List[FeatureService]:
+        """
+        Retrieve a list of feature services from the registry
+
+        Args:
+            allow_cache: Whether to allow returning feature services from a cached registry
+            project: Filter feature services based on project name
+
+        Returns:
+            List of feature services
+        """
+
+        registry = self._get_registry_proto(allow_cache=allow_cache)
+        feature_services = []
+        for feature_service_proto in registry.feature_services:
+            if feature_service_proto.spec.project == project:
+                feature_services.append(
+                    FeatureService.from_proto(feature_service_proto)
+                )
+        return feature_services
+
+    def get_feature_service(
+        self, name: str, project: str, allow_cache: bool = False
+    ) -> FeatureService:
+        """
+        Retrieves a feature service.
+ + Args: + name: Name of feature service + project: Feast project that this feature service belongs to + allow_cache: Whether to allow returning this feature service from a cached registry + + Returns: + Returns either the specified feature service, or raises an exception if + none is found + """ + registry = self._get_registry_proto(allow_cache=allow_cache) + + for feature_service_proto in registry.feature_services: + if ( + feature_service_proto.spec.project == project + and feature_service_proto.spec.name == name + ): + return FeatureService.from_proto(feature_service_proto) + raise FeatureServiceNotFoundException(name, project=project) + + def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: + """ + Retrieves an entity. + + Args: + name: Name of entity + project: Feast project that this entity belongs to + allow_cache: Whether to allow returning this entity from a cached registry + + Returns: + Returns either the specified entity, or raises an exception if + none is found + """ + registry_proto = self._get_registry_proto(allow_cache=allow_cache) + for entity_proto in registry_proto.entities: + if entity_proto.spec.name == name and entity_proto.spec.project == project: + return Entity.from_proto(entity_proto) + raise EntityNotFoundException(name, project=project) + def apply_feature_view( - self, feature_view: FeatureView, project: str, commit: bool = True + self, feature_view: BaseFeatureView, project: str, commit: bool = True ): """ Registers a single feature view with Feast @@ -189,29 +460,124 @@ def apply_feature_view( project: Feast project that this feature view belongs to commit: Whether the change should be persisted immediately """ - feature_view.is_valid() + feature_view.ensure_valid() + + now = datetime.utcnow() + if not feature_view.created_timestamp: + feature_view.created_timestamp = now + feature_view.last_updated_timestamp = now + feature_view_proto = feature_view.to_proto() feature_view_proto.spec.project = project self._prepare_registry_for_changes() assert self.cached_registry_proto + self._check_conflicting_feature_view_names(feature_view) + existing_feature_views_of_same_type: RepeatedCompositeFieldContainer + if isinstance(feature_view, FeatureView): + existing_feature_views_of_same_type = ( + self.cached_registry_proto.feature_views + ) + elif isinstance(feature_view, OnDemandFeatureView): + existing_feature_views_of_same_type = ( + self.cached_registry_proto.on_demand_feature_views + ) + elif isinstance(feature_view, RequestFeatureView): + existing_feature_views_of_same_type = ( + self.cached_registry_proto.request_feature_views + ) + else: + raise ValueError(f"Unexpected feature view type: {type(feature_view)}") + for idx, existing_feature_view_proto in enumerate( - self.cached_registry_proto.feature_views + existing_feature_views_of_same_type ): if ( existing_feature_view_proto.spec.name == feature_view_proto.spec.name and existing_feature_view_proto.spec.project == project ): - if FeatureView.from_proto(existing_feature_view_proto) == feature_view: + if ( + feature_view.__class__.from_proto(existing_feature_view_proto) + == feature_view + ): return else: - del self.cached_registry_proto.feature_views[idx] + del existing_feature_views_of_same_type[idx] break - self.cached_registry_proto.feature_views.append(feature_view_proto) + existing_feature_views_of_same_type.append(feature_view_proto) if commit: self.commit() + def list_on_demand_feature_views( + self, project: str, allow_cache: bool = False + ) -> 
List[OnDemandFeatureView]: + """ + Retrieve a list of on demand feature views from the registry + + Args: + project: Filter on demand feature views based on project name + allow_cache: Whether to allow returning on demand feature views from a cached registry + + Returns: + List of on demand feature views + """ + + registry = self._get_registry_proto(allow_cache=allow_cache) + on_demand_feature_views = [] + for on_demand_feature_view in registry.on_demand_feature_views: + if on_demand_feature_view.spec.project == project: + on_demand_feature_views.append( + OnDemandFeatureView.from_proto(on_demand_feature_view) + ) + return on_demand_feature_views + + def get_on_demand_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> OnDemandFeatureView: + """ + Retrieves an on demand feature view. + + Args: + name: Name of on demand feature view + project: Feast project that this on demand feature view belongs to + allow_cache: Whether to allow returning this on demand feature view from a cached registry + + Returns: + Returns either the specified on demand feature view, or raises an exception if + none is found + """ + registry = self._get_registry_proto(allow_cache=allow_cache) + + for on_demand_feature_view in registry.on_demand_feature_views: + if ( + on_demand_feature_view.spec.project == project + and on_demand_feature_view.spec.name == name + ): + return OnDemandFeatureView.from_proto(on_demand_feature_view) + raise OnDemandFeatureViewNotFoundException(name, project=project) + + def get_data_source( + self, name: str, project: str, allow_cache: bool = False + ) -> DataSource: + """ + Retrieves a data source. + + Args: + name: Name of data source + project: Feast project that this data source belongs to + allow_cache: Whether to allow returning this data source from a cached registry + + Returns: + Returns either the specified data source, or raises an exception if none is found + """ + registry = self._get_registry_proto(allow_cache=allow_cache) + + for data_source in registry.data_sources: + if data_source.project == project and data_source.name == name: + return DataSource.from_proto(data_source) + raise DataSourceObjectNotFoundException(name, project=project) + def apply_materialization( self, feature_view: FeatureView, @@ -246,6 +612,7 @@ def apply_materialization( existing_feature_view.materialization_intervals.append( (start_date, end_date) ) + existing_feature_view.last_updated_timestamp = datetime.utcnow() feature_view_proto = existing_feature_view.to_proto() feature_view_proto.spec.project = project del self.cached_registry_proto.feature_views[idx] @@ -256,23 +623,6 @@ def apply_materialization( raise FeatureViewNotFoundException(feature_view.name, project) - def list_feature_tables(self, project: str) -> List[FeatureTable]: - """ - Retrieve a list of feature tables from the registry - - Args: - project: Filter feature tables based on project name - - Returns: - List of feature tables - """ - registry_proto = self._get_registry_proto() - feature_tables = [] - for feature_table_proto in registry_proto.feature_tables: - if feature_table_proto.spec.project == project: - feature_tables.append(FeatureTable.from_proto(feature_table_proto)) - return feature_tables - def list_feature_views( self, project: str, allow_cache: bool = False ) -> List[FeatureView]: @@ -281,52 +631,56 @@ def list_feature_views( Args: allow_cache: Allow returning feature views from the cached registry - project: Filter feature tables based on project name + project: Filter feature views based 
on project name Returns: List of feature views """ registry_proto = self._get_registry_proto(allow_cache=allow_cache) - feature_views = [] + feature_views: List[FeatureView] = [] for feature_view_proto in registry_proto.feature_views: if feature_view_proto.spec.project == project: feature_views.append(FeatureView.from_proto(feature_view_proto)) return feature_views - def get_feature_table(self, name: str, project: str) -> FeatureTable: + def list_request_feature_views( + self, project: str, allow_cache: bool = False + ) -> List[RequestFeatureView]: """ - Retrieves a feature table. + Retrieve a list of request feature views from the registry Args: - name: Name of feature table - project: Feast project that this feature table belongs to + allow_cache: Allow returning feature views from the cached registry + project: Filter feature views based on project name Returns: - Returns either the specified feature table, or raises an exception if - none is found + List of feature views """ - registry_proto = self._get_registry_proto() - for feature_table_proto in registry_proto.feature_tables: - if ( - feature_table_proto.spec.name == name - and feature_table_proto.spec.project == project - ): - return FeatureTable.from_proto(feature_table_proto) - raise FeatureTableNotFoundException(name, project) + registry_proto = self._get_registry_proto(allow_cache=allow_cache) + feature_views: List[RequestFeatureView] = [] + for request_feature_view_proto in registry_proto.request_feature_views: + if request_feature_view_proto.spec.project == project: + feature_views.append( + RequestFeatureView.from_proto(request_feature_view_proto) + ) + return feature_views - def get_feature_view(self, name: str, project: str) -> FeatureView: + def get_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> FeatureView: """ Retrieves a feature view. Args: name: Name of feature view project: Feast project that this feature view belongs to + allow_cache: Allow returning feature view from the cached registry Returns: Returns either the specified feature view, or raises an exception if none is found """ - registry_proto = self._get_registry_proto() + registry_proto = self._get_registry_proto(allow_cache=allow_cache) for feature_view_proto in registry_proto.feature_views: if ( feature_view_proto.spec.name == name @@ -335,31 +689,30 @@ def get_feature_view(self, name: str, project: str) -> FeatureView: return FeatureView.from_proto(feature_view_proto) raise FeatureViewNotFoundException(name, project) - def delete_feature_table(self, name: str, project: str, commit: bool = True): + def delete_feature_service(self, name: str, project: str, commit: bool = True): """ - Deletes a feature table or raises an exception if not found. + Deletes a feature service or raises an exception if not found. 
Args: - name: Name of feature table - project: Feast project that this feature table belongs to + name: Name of feature service + project: Feast project that this feature service belongs to commit: Whether the change should be persisted immediately """ self._prepare_registry_for_changes() assert self.cached_registry_proto - for idx, existing_feature_table_proto in enumerate( - self.cached_registry_proto.feature_tables + for idx, feature_service_proto in enumerate( + self.cached_registry_proto.feature_services ): if ( - existing_feature_table_proto.spec.name == name - and existing_feature_table_proto.spec.project == project + feature_service_proto.spec.name == name + and feature_service_proto.spec.project == project ): - del self.cached_registry_proto.feature_tables[idx] + del self.cached_registry_proto.feature_services[idx] if commit: self.commit() return - - raise FeatureTableNotFoundException(name, project) + raise FeatureServiceNotFoundException(name, project) def delete_feature_view(self, name: str, project: str, commit: bool = True): """ @@ -385,233 +738,280 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): self.commit() return + for idx, existing_request_feature_view_proto in enumerate( + self.cached_registry_proto.request_feature_views + ): + if ( + existing_request_feature_view_proto.spec.name == name + and existing_request_feature_view_proto.spec.project == project + ): + del self.cached_registry_proto.request_feature_views[idx] + if commit: + self.commit() + return + + for idx, existing_on_demand_feature_view_proto in enumerate( + self.cached_registry_proto.on_demand_feature_views + ): + if ( + existing_on_demand_feature_view_proto.spec.name == name + and existing_on_demand_feature_view_proto.spec.project == project + ): + del self.cached_registry_proto.on_demand_feature_views[idx] + if commit: + self.commit() + return + raise FeatureViewNotFoundException(name, project) - def commit(self): - """Commits the state of the registry cache to the remote registry store.""" - if self.cached_registry_proto: - self._registry_store.update_registry_proto(self.cached_registry_proto) + def delete_entity(self, name: str, project: str, commit: bool = True): + """ + Deletes an entity or raises an exception if not found. 
-    def refresh(self):
-        """Refreshes the state of the registry cache by fetching the registry state from the remote registry store."""
-        self._get_registry_proto(allow_cache=False)
+        Args:
+            name: Name of entity
+            project: Feast project that this entity belongs to
+            commit: Whether the change should be persisted immediately
+        """
+        self._prepare_registry_for_changes()
+        assert self.cached_registry_proto

-    def _prepare_registry_for_changes(self):
-        """Prepares the Registry for changes by refreshing the cache if necessary."""
-        try:
-            self._get_registry_proto(allow_cache=True)
-        except FileNotFoundError:
-            registry_proto = RegistryProto()
-            registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION
-            self.cached_registry_proto = registry_proto
-            self.cached_registry_proto_created = datetime.now()
-        return self.cached_registry_proto
+        for idx, existing_entity_proto in enumerate(
+            self.cached_registry_proto.entities
+        ):
+            if (
+                existing_entity_proto.spec.name == name
+                and existing_entity_proto.spec.project == project
+            ):
+                del self.cached_registry_proto.entities[idx]
+                if commit:
+                    self.commit()
+                return

-    def _get_registry_proto(self, allow_cache: bool = False) -> RegistryProto:
-        """Returns the cached or remote registry state
+        raise EntityNotFoundException(name, project)

-        Args:
-            allow_cache: Whether to allow the use of the registry cache when fetching the RegistryProto
+    def apply_saved_dataset(
+        self, saved_dataset: SavedDataset, project: str, commit: bool = True,
+    ):
+        """
+        Registers a single saved dataset with Feast

-        Returns: Returns a RegistryProto object which represents the state of the registry
+        Args:
+            saved_dataset: SavedDataset that will be added / updated to registry
+            project: Feast project that this dataset belongs to
+            commit: Whether the change should be persisted immediately
         """
-        expired = (
-            self.cached_registry_proto is None
-            or self.cached_registry_proto_created is None
-        ) or (
-            self.cached_registry_proto_ttl.total_seconds() > 0  # 0 ttl means infinity
-            and (
-                datetime.now()
-                > (self.cached_registry_proto_created + self.cached_registry_proto_ttl)
-            )
-        )
-        if allow_cache and (not expired or self.cache_being_updated):
-            assert isinstance(self.cached_registry_proto, RegistryProto)
-            return self.cached_registry_proto
+        now = datetime.utcnow()
+        if not saved_dataset.created_timestamp:
+            saved_dataset.created_timestamp = now
+        saved_dataset.last_updated_timestamp = now

-        try:
-            self.cache_being_updated = True
-            registry_proto = self._registry_store.get_registry_proto()
-            self.cached_registry_proto = registry_proto
-            self.cached_registry_proto_created = datetime.now()
-        except Exception as e:
-            raise e
-        finally:
-            self.cache_being_updated = False
-        return registry_proto
+        saved_dataset_proto = saved_dataset.to_proto()
+        saved_dataset_proto.spec.project = project
+        self._prepare_registry_for_changes()
+        assert self.cached_registry_proto
+        for idx, existing_saved_dataset_proto in enumerate(
+            self.cached_registry_proto.saved_datasets
+        ):
+            if (
+                existing_saved_dataset_proto.spec.name == saved_dataset_proto.spec.name
+                and existing_saved_dataset_proto.spec.project == project
+            ):
+                del self.cached_registry_proto.saved_datasets[idx]
+                break

-class RegistryStore(ABC):
-    """
-    RegistryStore: abstract base class implemented by specific backends (local file system, GCS)
-    containing lower level methods used by the Registry class that are backend-specific.
- """ + self.cached_registry_proto.saved_datasets.append(saved_dataset_proto) + if commit: + self.commit() - @abstractmethod - def get_registry_proto(self): + def get_saved_dataset( + self, name: str, project: str, allow_cache: bool = False + ) -> SavedDataset: """ - Retrieves the registry proto from the registry path. If there is no file at that path, - raises a FileNotFoundError. + Retrieves a saved dataset. + + Args: + name: Name of dataset + project: Feast project that this dataset belongs to + allow_cache: Whether to allow returning this dataset from a cached registry Returns: - Returns either the registry proto stored at the registry path, or an empty registry proto. + Returns either the specified SavedDataset, or raises an exception if + none is found """ - pass + registry_proto = self._get_registry_proto(allow_cache=allow_cache) + for saved_dataset in registry_proto.saved_datasets: + if ( + saved_dataset.spec.name == name + and saved_dataset.spec.project == project + ): + return SavedDataset.from_proto(saved_dataset) + raise SavedDatasetNotFound(name, project=project) - @abstractmethod - def update_registry_proto(self, registry_proto: RegistryProto): + def list_saved_datasets( + self, project: str, allow_cache: bool = False + ) -> List[SavedDataset]: """ - Overwrites the current registry proto with the proto passed in. This method - writes to the registry path. + Retrieves a list of all saved datasets in specified project Args: - registry_proto: the new RegistryProto + project: Feast project + allow_cache: Whether to allow returning this dataset from a cached registry + + Returns: + Returns the list of SavedDatasets """ - pass + registry_proto = self._get_registry_proto(allow_cache=allow_cache) + return [ + SavedDataset.from_proto(saved_dataset) + for saved_dataset in registry_proto.saved_datasets + if saved_dataset.spec.project == project + ] + def commit(self): + """Commits the state of the registry cache to the remote registry store.""" + if self.cached_registry_proto: + self._registry_store.update_registry_proto(self.cached_registry_proto) -class LocalRegistryStore(RegistryStore): - def __init__(self, repo_path: Path, registry_path_string: str): - registry_path = Path(registry_path_string) - if registry_path.is_absolute(): - self._filepath = registry_path - else: - self._filepath = repo_path.joinpath(registry_path) + def refresh(self): + """Refreshes the state of the registry cache by fetching the registry state from the remote registry store.""" + self._get_registry_proto(allow_cache=False) - def get_registry_proto(self): - registry_proto = RegistryProto() - if self._filepath.exists(): - registry_proto.ParseFromString(self._filepath.read_bytes()) - return registry_proto - raise FileNotFoundError( - f'Registry not found at path "{self._filepath}". Have you run "feast apply"?' - ) + def teardown(self): + """Tears down (removes) the registry.""" + self._registry_store.teardown() + + def to_dict(self, project: str) -> Dict[str, List[Any]]: + """Returns a dictionary representation of the registry contents for the specified project. - def update_registry_proto(self, registry_proto: RegistryProto): - self._write_registry(registry_proto) - return + For each list in the dictionary, the elements are sorted by name, so this + method can be used to compare two registries. 
- def _write_registry(self, registry_proto: RegistryProto): - registry_proto.version_id = str(uuid.uuid4()) - registry_proto.last_updated.FromDatetime(datetime.utcnow()) - file_dir = self._filepath.parent - file_dir.mkdir(exist_ok=True) - self._filepath.write_bytes(registry_proto.SerializeToString()) - return + Args: + project: Feast project to convert to a dict + """ + registry_dict: Dict[str, Any] = defaultdict(list) + registry_dict["project"] = project + for data_source in sorted( + self.list_data_sources(project=project), key=lambda ds: ds.name + ): + registry_dict["dataSources"].append( + self._message_to_sorted_dict(data_source.to_proto()) + ) + for entity in sorted( + self.list_entities(project=project), key=lambda entity: entity.name + ): + registry_dict["entities"].append( + self._message_to_sorted_dict(entity.to_proto()) + ) + for feature_view in sorted( + self.list_feature_views(project=project), + key=lambda feature_view: feature_view.name, + ): + registry_dict["featureViews"].append( + self._message_to_sorted_dict(feature_view.to_proto()) + ) + for feature_service in sorted( + self.list_feature_services(project=project), + key=lambda feature_service: feature_service.name, + ): + registry_dict["featureServices"].append( + self._message_to_sorted_dict(feature_service.to_proto()) + ) + for on_demand_feature_view in sorted( + self.list_on_demand_feature_views(project=project), + key=lambda on_demand_feature_view: on_demand_feature_view.name, + ): + odfv_dict = self._message_to_sorted_dict(on_demand_feature_view.to_proto()) + odfv_dict["spec"]["userDefinedFunction"]["body"] = dill.source.getsource( + on_demand_feature_view.udf + ) + registry_dict["onDemandFeatureViews"].append(odfv_dict) + for request_feature_view in sorted( + self.list_request_feature_views(project=project), + key=lambda request_feature_view: request_feature_view.name, + ): + registry_dict["requestFeatureViews"].append( + self._message_to_sorted_dict(request_feature_view.to_proto()) + ) + for saved_dataset in sorted( + self.list_saved_datasets(project=project), key=lambda item: item.name + ): + registry_dict["savedDatasets"].append( + self._message_to_sorted_dict(saved_dataset.to_proto()) + ) + for infra_object in sorted(self.get_infra(project=project).infra_objects): + registry_dict["infra"].append( + self._message_to_sorted_dict(infra_object.to_proto()) + ) + return registry_dict + @staticmethod + def _message_to_sorted_dict(message: Message) -> Dict[str, Any]: + return json.loads(MessageToJson(message, sort_keys=True)) -class GCSRegistryStore(RegistryStore): - def __init__(self, uri: str): + def _prepare_registry_for_changes(self): + """Prepares the Registry for changes by refreshing the cache if necessary.""" try: - from google.cloud import storage - except ImportError as e: - from feast.errors import FeastExtrasDependencyImportError - - raise FeastExtrasDependencyImportError("gcp", str(e)) + self._get_registry_proto(allow_cache=True) + except FileNotFoundError: + registry_proto = RegistryProto() + registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = datetime.utcnow() + return self.cached_registry_proto - self.gcs_client = storage.Client() - self._uri = urlparse(uri) - self._bucket = self._uri.hostname - self._blob = self._uri.path.lstrip("/") - return + def _get_registry_proto(self, allow_cache: bool = False) -> RegistryProto: + """Returns the cached or remote registry state - def get_registry_proto(self): - from 
google.cloud import storage - from google.cloud.exceptions import NotFound + Args: + allow_cache: Whether to allow the use of the registry cache when fetching the RegistryProto - file_obj = TemporaryFile() - registry_proto = RegistryProto() - try: - bucket = self.gcs_client.get_bucket(self._bucket) - except NotFound: - raise Exception( - f"No bucket named {self._bucket} exists; please create it first." - ) - if storage.Blob(bucket=bucket, name=self._blob).exists(self.gcs_client): - self.gcs_client.download_blob_to_file( - self._uri.geturl(), file_obj, timeout=30 + Returns: Returns a RegistryProto object which represents the state of the registry + """ + with self._refresh_lock: + expired = ( + self.cached_registry_proto is None + or self.cached_registry_proto_created is None + ) or ( + self.cached_registry_proto_ttl.total_seconds() + > 0 # 0 ttl means infinity + and ( + datetime.utcnow() + > ( + self.cached_registry_proto_created + + self.cached_registry_proto_ttl + ) + ) ) - file_obj.seek(0) - registry_proto.ParseFromString(file_obj.read()) - return registry_proto - raise FileNotFoundError( - f'Registry not found at path "{self._uri.geturl()}". Have you run "feast apply"?' - ) - - def update_registry_proto(self, registry_proto: RegistryProto): - self._write_registry(registry_proto) - return - - def _write_registry(self, registry_proto: RegistryProto): - registry_proto.version_id = str(uuid.uuid4()) - registry_proto.last_updated.FromDatetime(datetime.utcnow()) - # we have already checked the bucket exists so no need to do it again - gs_bucket = self.gcs_client.get_bucket(self._bucket) - blob = gs_bucket.blob(self._blob) - file_obj = TemporaryFile() - file_obj.write(registry_proto.SerializeToString()) - file_obj.seek(0) - blob.upload_from_file(file_obj) - return - - -class S3RegistryStore(RegistryStore): - def __init__(self, uri: str): - try: - import boto3 - except ImportError as e: - from feast.errors import FeastExtrasDependencyImportError - raise FeastExtrasDependencyImportError("aws", str(e)) - self._uri = urlparse(uri) - self._bucket = self._uri.hostname - self._key = self._uri.path.lstrip("/") + if allow_cache and not expired: + assert isinstance(self.cached_registry_proto, RegistryProto) + return self.cached_registry_proto - self.s3_client = boto3.resource( - "s3", endpoint_url=os.environ.get("FEAST_S3_ENDPOINT_URL") - ) + registry_proto = self._registry_store.get_registry_proto() + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = datetime.utcnow() - def get_registry_proto(self): - file_obj = TemporaryFile() - registry_proto = RegistryProto() - try: - from botocore.exceptions import ClientError - except ImportError as e: - from feast.errors import FeastExtrasDependencyImportError + return registry_proto - raise FeastExtrasDependencyImportError("aws", str(e)) - try: - bucket = self.s3_client.Bucket(self._bucket) - self.s3_client.meta.client.head_bucket(Bucket=bucket.name) - except ClientError as e: - # If a client error is thrown, then check that it was a 404 error. - # If it was a 404 error, then the bucket does not exist. 
-            error_code = int(e.response["Error"]["Code"])
-            if error_code == 404:
-                raise S3RegistryBucketNotExist(self._bucket)
-            else:
-                raise S3RegistryBucketForbiddenAccess(self._bucket) from e
+    def _check_conflicting_feature_view_names(self, feature_view: BaseFeatureView):
+        name_to_fv_protos = self._existing_feature_view_names_to_fvs()
+        if feature_view.name in name_to_fv_protos:
+            if not isinstance(
+                name_to_fv_protos.get(feature_view.name), feature_view.proto_class
+            ):
+                raise ConflictingFeatureViewNames(feature_view.name)
-        try:
-            obj = bucket.Object(self._key)
-            obj.download_fileobj(file_obj)
-            file_obj.seek(0)
-            registry_proto.ParseFromString(file_obj.read())
-            return registry_proto
-        except ClientError as e:
-            raise FileNotFoundError(
-                f"Error while trying to locate Registry at path {self._uri.geturl()}"
-            ) from e
-
-    def update_registry_proto(self, registry_proto: RegistryProto):
-        self._write_registry(registry_proto)
-        return
-
-    def _write_registry(self, registry_proto: RegistryProto):
-        registry_proto.version_id = str(uuid.uuid4())
-        registry_proto.last_updated.FromDatetime(datetime.utcnow())
-        # we have already checked the bucket exists so no need to do it again
-        file_obj = TemporaryFile()
-        file_obj.write(registry_proto.SerializeToString())
-        file_obj.seek(0)
-        self.s3_client.Bucket(self._bucket).put_object(Body=file_obj, Key=self._key)
+    def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]:
+        assert self.cached_registry_proto
+        odfvs = {
+            fv.spec.name: fv
+            for fv in self.cached_registry_proto.on_demand_feature_views
+        }
+        fvs = {fv.spec.name: fv for fv in self.cached_registry_proto.feature_views}
+        request_fvs = {
+            fv.spec.name: fv for fv in self.cached_registry_proto.request_feature_views
+        }
+        return {**odfvs, **fvs, **request_fvs}
diff --git a/sdk/python/feast/registry_store.py b/sdk/python/feast/registry_store.py
new file mode 100644
index 0000000000..c42a55cd9d
--- /dev/null
+++ b/sdk/python/feast/registry_store.py
@@ -0,0 +1,49 @@
+from abc import ABC, abstractmethod
+
+from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto
+
+
+class RegistryStore(ABC):
+    """
+    A registry store is a storage backend for the Feast registry.
+    """
+
+    @abstractmethod
+    def get_registry_proto(self) -> RegistryProto:
+        """
+        Retrieves the registry proto from the registry path. If there is no file at
+        that path, raises a FileNotFoundError.
+
+        Returns:
+            The registry proto stored at the registry path.
+        """
+        pass
+
+    @abstractmethod
+    def update_registry_proto(self, registry_proto: RegistryProto):
+        """
+        Overwrites the current registry proto with the proto passed in. This method
+        writes to the registry path.
+
+        Args:
+            registry_proto: the new RegistryProto
+        """
+        pass
+
+    @abstractmethod
+    def teardown(self):
+        """
+        Tear down the registry.
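+        Implementations are expected to remove any stored registry state, e.g.
+        by deleting the registry file or object that backs the store.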
+ """ + pass + + +class NoopRegistryStore(RegistryStore): + def get_registry_proto(self) -> RegistryProto: + pass + + def update_registry_proto(self, registry_proto: RegistryProto): + pass + + def teardown(self): + pass diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 6c51350b05..c86a42a8bd 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -1,14 +1,31 @@ +import logging +import os from pathlib import Path from typing import Any import yaml -from pydantic import BaseModel, StrictInt, StrictStr, ValidationError, root_validator +from pydantic import ( + BaseModel, + StrictInt, + StrictStr, + ValidationError, + root_validator, + validator, +) from pydantic.error_wrappers import ErrorWrapper from pydantic.typing import Dict, Optional, Union -from feast.importer import get_class_from_type +from feast import flags +from feast.errors import ( + FeastFeatureServerTypeInvalidError, + FeastFeatureServerTypeSetError, + FeastProviderNotSetError, +) +from feast.importer import import_class from feast.usage import log_exceptions +_logger = logging.getLogger(__name__) + # These dict exists so that: # - existing values for the online store type in featurestore.yaml files continue to work in a backwards compatible way # - first party and third party implementations can use the same class loading code path. @@ -17,17 +34,31 @@ "datastore": "feast.infra.online_stores.datastore.DatastoreOnlineStore", "redis": "feast.infra.online_stores.redis.RedisOnlineStore", "dynamodb": "feast.infra.online_stores.dynamodb.DynamoDBOnlineStore", + "snowflake.online": "feast.infra.online_stores.snowflake.SnowflakeOnlineStore", } OFFLINE_STORE_CLASS_FOR_TYPE = { "file": "feast.infra.offline_stores.file.FileOfflineStore", "bigquery": "feast.infra.offline_stores.bigquery.BigQueryOfflineStore", "redshift": "feast.infra.offline_stores.redshift.RedshiftOfflineStore", + "snowflake.offline": "feast.infra.offline_stores.snowflake.SnowflakeOfflineStore", + "spark": "feast.infra.offline_stores.contrib.spark_offline_store.spark.SparkOfflineStore", + "trino": "feast.infra.offline_stores.contrib.trino_offline_store.trino.TrinoOfflineStore", +} + +FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE = { + "aws_lambda": "feast.infra.feature_servers.aws_lambda.config.AwsLambdaFeatureServerConfig", + "gcp_cloudrun": "feast.infra.feature_servers.gcp_cloudrun.config.GcpCloudRunFeatureServerConfig", +} + +FEATURE_SERVER_TYPE_FOR_PROVIDER = { + "aws": "aws_lambda", + "gcp": "gcp_cloudrun", } class FeastBaseModel(BaseModel): - """ Feast Pydantic Configuration Class """ + """Feast Pydantic Configuration Class""" class Config: arbitrary_types_allowed = True @@ -35,7 +66,7 @@ class Config: class FeastConfigBaseModel(BaseModel): - """ Feast Pydantic Configuration Class """ + """Feast Pydantic Configuration Class""" class Config: arbitrary_types_allowed = True @@ -43,7 +74,10 @@ class Config: class RegistryConfig(FeastBaseModel): - """ Metadata Store Configuration. Configuration that relates to reading from and writing to the Feast registry.""" + """Metadata Store Configuration. Configuration that relates to reading from and writing to the Feast registry.""" + + registry_store_type: Optional[StrictStr] + """ str: Provider name or a class name that implements RegistryStore. """ path: StrictStr """ str: Path to metadata store. Can be a local path, or remote object storage path, e.g. 
a GCS URI """ @@ -56,7 +90,7 @@ class RegistryConfig(FeastBaseModel): class RepoConfig(FeastBaseModel): - """ Repo config. Typically loaded from `feature_store.yaml` """ + """Repo config. Typically loaded from `feature_store.yaml`""" registry: Union[StrictStr, RegistryConfig] = "data/registry.db" """ str: Path to metadata store. Can be a local path, or remote object storage path, e.g. a GCS URI """ @@ -76,10 +110,19 @@ class RepoConfig(FeastBaseModel): offline_store: Any """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ + feature_server: Optional[Any] + """ FeatureServerConfig: Feature server configuration (optional depending on provider) """ + + flags: Any + """ Flags: Feature flags for experimental features (optional) """ + repo_path: Optional[Path] = None + go_feature_retrieval: Optional[bool] = False + def __init__(self, **data: Any): super().__init__(**data) + if isinstance(self.online_store, Dict): self.online_store = get_online_config_from_type(self.online_store["type"])( **self.online_store @@ -94,6 +137,11 @@ def __init__(self, **data: Any): elif isinstance(self.offline_store, str): self.offline_store = get_offline_config_from_type(self.offline_store)() + if isinstance(self.feature_server, Dict): + self.feature_server = get_feature_server_config_from_type( + self.feature_server["type"] + )(**self.feature_server) + def get_registry_config(self): if isinstance(self.registry, str): return RegistryConfig(path=self.registry) @@ -113,8 +161,12 @@ def _validate_online_store_config(cls, values): if "online_store" not in values: values["online_store"] = dict() - # Skip if we aren't creating the configuration from a dict + # Skip if we aren't creating the configuration from a dict or online store is null or it is a string like "None" or "null" if not isinstance(values["online_store"], Dict): + if isinstance(values["online_store"], str) and values[ + "online_store" + ].lower() in {"none", "null"}: + values["online_store"] = None return values # Make sure that the provider configuration is set. We need it to set the defaults @@ -141,7 +193,6 @@ def _validate_online_store_config(cls, values): raise ValidationError( [ErrorWrapper(e, loc="online_store")], model=RepoConfig, ) - return values @root_validator(pre=True) @@ -164,7 +215,7 @@ def _validate_offline_store_config(cls, values): elif values["provider"] == "gcp": values["offline_store"]["type"] = "bigquery" elif values["provider"] == "aws": - values["offline_store"]["type"] = "file" + values["offline_store"]["type"] = "redshift" offline_store_type = values["offline_store"]["type"] @@ -179,6 +230,77 @@ def _validate_offline_store_config(cls, values): return values + @root_validator(pre=True) + def _validate_feature_server_config(cls, values): + # Having no feature server is the default. + if "feature_server" not in values: + return values + + # Skip if we aren't creating the configuration from a dict + if not isinstance(values["feature_server"], Dict): + return values + + # Make sure that the provider configuration is set. 
We need it to set the defaults + if "provider" not in values: + raise FeastProviderNotSetError() + + feature_server_type = FEATURE_SERVER_TYPE_FOR_PROVIDER.get(values["provider"]) + defined_type = values["feature_server"].get("type") + # Make sure that the type is either not set, or set correctly, since it's defined by the provider + if defined_type not in (None, feature_server_type): + raise FeastFeatureServerTypeSetError(defined_type) + values["feature_server"]["type"] = feature_server_type + + # Validate the dict to ensure one of the union types match + try: + feature_server_config_class = get_feature_server_config_from_type( + feature_server_type + ) + feature_server_config_class(**values["feature_server"]) + except ValidationError as e: + raise ValidationError( + [ErrorWrapper(e, loc="feature_server")], model=RepoConfig, + ) + + return values + + @validator("project") + def _validate_project_name(cls, v): + from feast.repo_operations import is_valid_name + + if not is_valid_name(v): + raise ValueError( + f"Project name, {v}, should only have " + f"alphanumerical values and underscores but not start with an underscore." + ) + return v + + @validator("flags") + def _validate_flags(cls, v): + if not isinstance(v, Dict): + return + + for flag_name, val in v.items(): + if flag_name not in flags.FLAG_NAMES: + _logger.warn( + "Unrecognized flag: %s. This feature may be invalid, or may refer " + "to a previously experimental feature which has graduated to production.", + flag_name, + ) + if type(val) is not bool: + raise ValueError(f"Flag value, {val}, not valid.") + + return v + + def write_to_path(self, repo_path: Path): + config_path = repo_path / "feature_store.yaml" + with open(config_path, mode="w") as f: + yaml.dump( + yaml.safe_load(self.json(exclude={"repo_path"}, exclude_unset=True,)), + f, + sort_keys=False, + ) + class FeastConfigError(Exception): def __init__(self, error_message, config_path): @@ -197,7 +319,7 @@ def __repr__(self) -> str: def get_data_source_class_from_type(data_source_type: str): module_name, config_class_name = data_source_type.rsplit(".", 1) - return get_class_from_type(module_name, config_class_name, "Source") + return import_class(module_name, config_class_name, "DataSource") def get_online_config_from_type(online_store_type: str): @@ -208,7 +330,7 @@ def get_online_config_from_type(online_store_type: str): module_name, online_store_class_type = online_store_type.rsplit(".", 1) config_class_name = f"{online_store_class_type}Config" - return get_class_from_type(module_name, config_class_name, config_class_name) + return import_class(module_name, config_class_name, config_class_name) def get_offline_config_from_type(offline_store_type: str): @@ -219,14 +341,24 @@ def get_offline_config_from_type(offline_store_type: str): module_name, offline_store_class_type = offline_store_type.rsplit(".", 1) config_class_name = f"{offline_store_class_type}Config" - return get_class_from_type(module_name, config_class_name, config_class_name) + return import_class(module_name, config_class_name, config_class_name) + + +def get_feature_server_config_from_type(feature_server_type: str): + # We do not support custom feature servers right now. 
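+    # The mapping above resolves a known type such as "aws_lambda" to the
+    # dotted path of its config class; import_class loads that class, and
+    # callers instantiate it with the remaining keys of the feature_server
+    # block from feature_store.yaml.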
+ if feature_server_type not in FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE: + raise FeastFeatureServerTypeInvalidError(feature_server_type) + + feature_server_type = FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE[feature_server_type] + module_name, config_class_name = feature_server_type.rsplit(".", 1) + return import_class(module_name, config_class_name, config_class_name) def load_repo_config(repo_path: Path) -> RepoConfig: config_path = repo_path / "feature_store.yaml" with open(config_path) as f: - raw_config = yaml.safe_load(f) + raw_config = yaml.safe_load(os.path.expandvars(f.read())) try: c = RepoConfig(**raw_config) c.repo_path = repo_path diff --git a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py new file mode 100644 index 0000000000..4d7c92f2a6 --- /dev/null +++ b/sdk/python/feast/repo_contents.py @@ -0,0 +1,53 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import List, NamedTuple + +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.request_feature_view import RequestFeatureView + + +class RepoContents(NamedTuple): + """ + Represents the objects in a Feast feature repo. 
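+
+    This is the in-memory result of parsing a feature repo's Python files;
+    to_registry_proto() converts the same contents to protobuf form, e.g. so
+    they can be diffed against the objects already stored in a registry.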
+ """ + + data_sources: List[DataSource] + feature_views: List[FeatureView] + on_demand_feature_views: List[OnDemandFeatureView] + request_feature_views: List[RequestFeatureView] + entities: List[Entity] + feature_services: List[FeatureService] + + def to_registry_proto(self) -> RegistryProto: + registry_proto = RegistryProto() + registry_proto.data_sources.extend([e.to_proto() for e in self.data_sources]) + registry_proto.entities.extend([e.to_proto() for e in self.entities]) + registry_proto.feature_views.extend( + [fv.to_proto() for fv in self.feature_views] + ) + registry_proto.on_demand_feature_views.extend( + [fv.to_proto() for fv in self.on_demand_feature_views] + ) + registry_proto.request_feature_views.extend( + [fv.to_proto() for fv in self.request_feature_views] + ) + registry_proto.feature_services.extend( + [fs.to_proto() for fs in self.feature_services] + ) + return registry_proto diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 6482451f71..40f1a055a9 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -1,43 +1,41 @@ import importlib +import json import os import random import re import sys -from datetime import timedelta from importlib.abc import Loader +from importlib.machinery import ModuleSpec from pathlib import Path -from typing import List, NamedTuple, Set, Union +from typing import List, Set, Union import click from click.exceptions import BadParameter -from feast import Entity, FeatureTable -from feast.feature_view import FeatureView -from feast.inference import ( - update_data_sources_with_inferred_event_timestamp_col, - update_entities_with_inferred_types_from_feature_views, -) -from feast.infra.provider import get_provider +from feast import PushSource +from feast.data_source import DataSource +from feast.diff.registry_diff import extract_objects_for_keep_delete_update_add +from feast.entity import Entity +from feast.feature_service import FeatureService +from feast.feature_store import FeatureStore +from feast.feature_view import DUMMY_ENTITY, FeatureView from feast.names import adjectives, animals -from feast.registry import Registry +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.registry import FEAST_OBJECT_TYPES, FeastObjectType, Registry from feast.repo_config import RepoConfig +from feast.repo_contents import RepoContents +from feast.request_feature_view import RequestFeatureView from feast.usage import log_exceptions_and_usage -def py_path_to_module(path: Path, repo_root: Path) -> str: +def py_path_to_module(path: Path) -> str: return ( - str(path.relative_to(repo_root))[: -len(".py")] + str(path.relative_to(os.getcwd()))[: -len(".py")] .replace("./", "") .replace("/", ".") ) -class ParsedRepo(NamedTuple): - feature_tables: List[FeatureTable] - feature_views: List[FeatureView] - entities: List[Entity] - - def read_feastignore(repo_root: Path) -> List[str]: """Read .feastignore in the repo root directory (if exists) and return the list of user-defined ignore paths""" feast_ignore = repo_root / ".feastignore" @@ -83,7 +81,11 @@ def get_repo_files(repo_root: Path) -> List[Path]: ignore_files = get_ignore_files(repo_root, ignore_paths) # List all Python files in the root directory (recursively) - repo_files = {p.resolve() for p in repo_root.glob("**/*.py") if p.is_file()} + repo_files = { + p.resolve() + for p in repo_root.glob("**/*.py") + if p.is_file() and "__init__.py" != p.name + } # Ignore all files that match any of the ignore paths in 
.feastignore repo_files -= ignore_files @@ -91,191 +93,202 @@ def get_repo_files(repo_root: Path) -> List[Path]: return sorted(repo_files) -def parse_repo(repo_root: Path) -> ParsedRepo: - """ Collect feature table definitions from feature repo """ - res = ParsedRepo(feature_tables=[], entities=[], feature_views=[]) +def parse_repo(repo_root: Path) -> RepoContents: + """ + Collects unique Feast object definitions from the given feature repo. + + Specifically, if an object foo has already been added, bar will still be added if + (bar == foo), but not if (bar is foo). This ensures that import statements will + not result in duplicates, but defining two equal objects will. + """ + res = RepoContents( + data_sources=[], + entities=[], + feature_views=[], + feature_services=[], + on_demand_feature_views=[], + request_feature_views=[], + ) for repo_file in get_repo_files(repo_root): - module_path = py_path_to_module(repo_file, repo_root) + module_path = py_path_to_module(repo_file) module = importlib.import_module(module_path) - for attr_name in dir(module): obj = getattr(module, attr_name) - if isinstance(obj, FeatureTable): - res.feature_tables.append(obj) - if isinstance(obj, FeatureView): + if isinstance(obj, DataSource) and not any( + (obj is ds) for ds in res.data_sources + ): + res.data_sources.append(obj) + if isinstance(obj, FeatureView) and not any( + (obj is fv) for fv in res.feature_views + ): res.feature_views.append(obj) - elif isinstance(obj, Entity): + if isinstance(obj.stream_source, PushSource) and not any( + (obj is ds) for ds in res.data_sources + ): + res.data_sources.append(obj.stream_source.batch_source) + elif isinstance(obj, Entity) and not any( + (obj is entity) for entity in res.entities + ): res.entities.append(obj) + elif isinstance(obj, FeatureService) and not any( + (obj is fs) for fs in res.feature_services + ): + res.feature_services.append(obj) + elif isinstance(obj, OnDemandFeatureView) and not any( + (obj is odfv) for odfv in res.on_demand_feature_views + ): + res.on_demand_feature_views.append(obj) + elif isinstance(obj, RequestFeatureView) and not any( + (obj is rfv) for rfv in res.request_feature_views + ): + res.request_feature_views.append(obj) + res.entities.append(DUMMY_ENTITY) return res @log_exceptions_and_usage -def apply_total(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): - from colorama import Fore, Style +def plan(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): os.chdir(repo_path) - registry_config = repo_config.get_registry_config() - project = repo_config.project + project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) + + if not skip_source_validation: + data_sources = [t.batch_source for t in repo.feature_views] + # Make sure the data source used by this feature view is supported by Feast + for data_source in data_sources: + data_source.validate(store.config) + + registry_diff, infra_diff, _ = store._plan(repo) + click.echo(registry_diff.to_string()) + click.echo(infra_diff.to_string()) + + +def _prepare_registry_and_repo(repo_config, repo_path): + store = FeatureStore(config=repo_config) + project = store.project if not is_valid_name(project): print( f"{project} is not valid. Project name should only have " f"alphanumerical values and underscores but not start with an underscore." 
) sys.exit(1) - registry = Registry( - registry_path=registry_config.path, - repo_path=repo_path, - cache_ttl=timedelta(seconds=registry_config.cache_ttl_seconds), - ) - registry._initialize_registry() + registry = store.registry sys.dont_write_bytecode = True repo = parse_repo(repo_path) - data_sources = [t.input for t in repo.feature_views] + return project, registry, repo, store + + +def extract_objects_for_apply_delete(project, registry, repo): + # TODO(achals): This code path should be refactored to handle added & kept entities separately. + ( + _, + objs_to_delete, + objs_to_update, + objs_to_add, + ) = extract_objects_for_keep_delete_update_add(registry, project, repo) + + all_to_apply: List[ + Union[ + Entity, FeatureView, RequestFeatureView, OnDemandFeatureView, FeatureService + ] + ] = [] + for object_type in FEAST_OBJECT_TYPES: + to_apply = set(objs_to_add[object_type]).union(objs_to_update[object_type]) + all_to_apply.extend(to_apply) + + all_to_delete: List[ + Union[ + Entity, FeatureView, RequestFeatureView, OnDemandFeatureView, FeatureService + ] + ] = [] + for object_type in FEAST_OBJECT_TYPES: + all_to_delete.extend(objs_to_delete[object_type]) + + return ( + all_to_apply, + all_to_delete, + set(objs_to_add[FeastObjectType.FEATURE_VIEW]).union( + set(objs_to_update[FeastObjectType.FEATURE_VIEW]) + ), + objs_to_delete[FeastObjectType.FEATURE_VIEW], + ) + +def apply_total_with_repo_instance( + store: FeatureStore, + project: str, + registry: Registry, + repo: RepoContents, + skip_source_validation: bool, +): if not skip_source_validation: + data_sources = [t.batch_source for t in repo.feature_views] # Make sure the data source used by this feature view is supported by Feast for data_source in data_sources: - data_source.validate(repo_config) - - # Make inferences - update_entities_with_inferred_types_from_feature_views( - repo.entities, repo.feature_views, repo_config - ) - update_data_sources_with_inferred_event_timestamp_col(data_sources, repo_config) - for view in repo.feature_views: - view.infer_features_from_input_source(repo_config) + data_source.validate(store.config) - repo_table_names = set(t.name for t in repo.feature_tables) + registry_diff, infra_diff, new_infra = store._plan(repo) - for t in repo.feature_views: - repo_table_names.add(t.name) + # For each object in the registry, determine whether it should be kept or deleted. 
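+    # extract_objects_for_apply_delete partitions the registry objects into
+    # keep/delete/update/add buckets: the add and update buckets are applied
+    # below, while the delete bucket is removed.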
+ ( + all_to_apply, + all_to_delete, + views_to_keep, + views_to_delete, + ) = extract_objects_for_apply_delete(project, registry, repo) - tables_to_delete = [] - for registry_table in registry.list_feature_tables(project=project): - if registry_table.name not in repo_table_names: - tables_to_delete.append(registry_table) + click.echo(registry_diff.to_string()) - views_to_delete = [] - for registry_view in registry.list_feature_views(project=project): - if registry_view.name not in repo_table_names: - views_to_delete.append(registry_view) - - sys.dont_write_bytecode = False - for entity in repo.entities: - registry.apply_entity(entity, project=project, commit=False) - click.echo( - f"Registered entity {Style.BRIGHT + Fore.GREEN}{entity.name}{Style.RESET_ALL}" - ) + if store._should_use_plan(): + store._apply_diffs(registry_diff, infra_diff, new_infra) + click.echo(infra_diff.to_string()) + else: + store.apply(all_to_apply, objects_to_delete=all_to_delete, partial=False) + log_infra_changes(views_to_keep, views_to_delete) - # Delete tables that should not exist - for registry_table in tables_to_delete: - registry.delete_feature_table( - registry_table.name, project=project, commit=False - ) - click.echo( - f"Deleted feature table {Style.BRIGHT + Fore.GREEN}{registry_table.name}{Style.RESET_ALL} from registry" - ) - # Create tables that should - for table in repo.feature_tables: - registry.apply_feature_table(table, project, commit=False) - click.echo( - f"Registered feature table {Style.BRIGHT + Fore.GREEN}{table.name}{Style.RESET_ALL}" - ) +def log_infra_changes( + views_to_keep: Set[FeatureView], views_to_delete: Set[FeatureView] +): + from colorama import Fore, Style - # Delete views that should not exist - for registry_view in views_to_delete: - registry.delete_feature_view(registry_view.name, project=project, commit=False) + for view in views_to_keep: click.echo( - f"Deleted feature view {Style.BRIGHT + Fore.GREEN}{registry_view.name}{Style.RESET_ALL} from registry" + f"Deploying infrastructure for {Style.BRIGHT + Fore.GREEN}{view.name}{Style.RESET_ALL}" ) - - # Create views that should - for view in repo.feature_views: - registry.apply_feature_view(view, project, commit=False) + for view in views_to_delete: click.echo( - f"Registered feature view {Style.BRIGHT + Fore.GREEN}{view.name}{Style.RESET_ALL}" + f"Removing infrastructure for {Style.BRIGHT + Fore.RED}{view.name}{Style.RESET_ALL}" ) - registry.commit() - infra_provider = get_provider(repo_config, repo_path) - all_to_delete: List[Union[FeatureTable, FeatureView]] = [] - all_to_delete.extend(tables_to_delete) - all_to_delete.extend(views_to_delete) - - all_to_keep: List[Union[FeatureTable, FeatureView]] = [] - all_to_keep.extend(repo.feature_tables) - all_to_keep.extend(repo.feature_views) - - entities_to_delete: List[Entity] = [] - repo_entities_names = set([e.name for e in repo.entities]) - for registry_entity in registry.list_entities(project=project): - if registry_entity.name not in repo_entities_names: - entities_to_delete.append(registry_entity) - - entities_to_keep: List[Entity] = repo.entities - - for name in [view.name for view in repo.feature_tables] + [ - table.name for table in repo.feature_views - ]: - click.echo( - f"Deploying infrastructure for {Style.BRIGHT + Fore.GREEN}{name}{Style.RESET_ALL}" - ) - for name in [view.name for view in views_to_delete] + [ - table.name for table in tables_to_delete - ]: - click.echo( - f"Removing infrastructure for {Style.BRIGHT + Fore.GREEN}{name}{Style.RESET_ALL}" - ) 
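+
+# A minimal sketch of the flow these helpers implement (illustrative; assumes
+# the current working directory is a feature repo with a feature_store.yaml):
+#
+#     store = FeatureStore(repo_path=".")  # loads feature_store.yaml
+#     repo = parse_repo(Path("."))         # collect repo object definitions
+#     apply_total_with_repo_instance(
+#         store, store.project, store.registry, repo, skip_source_validation=False
+#     )
+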
+@log_exceptions_and_usage +def apply_total(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): - infra_provider.update_infra( - project, - tables_to_delete=all_to_delete, - tables_to_keep=all_to_keep, - entities_to_delete=entities_to_delete, - entities_to_keep=entities_to_keep, - partial=False, + os.chdir(repo_path) + project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) + apply_total_with_repo_instance( + store, project, registry, repo, skip_source_validation ) @log_exceptions_and_usage def teardown(repo_config: RepoConfig, repo_path: Path): - registry_config = repo_config.get_registry_config() - registry = Registry( - registry_path=registry_config.path, - repo_path=repo_path, - cache_ttl=timedelta(seconds=registry_config.cache_ttl_seconds), - ) - project = repo_config.project - registry_tables: List[Union[FeatureTable, FeatureView]] = [] - registry_tables.extend(registry.list_feature_tables(project=project)) - registry_tables.extend(registry.list_feature_views(project=project)) - - registry_entities: List[Entity] = registry.list_entities(project=project) - - infra_provider = get_provider(repo_config, repo_path) - infra_provider.teardown_infra( - project, tables=registry_tables, entities=registry_entities - ) + # Cannot pass in both repo_path and repo_config to FeatureStore. + feature_store = FeatureStore(repo_path=repo_path, config=None) + feature_store.teardown() @log_exceptions_and_usage def registry_dump(repo_config: RepoConfig, repo_path: Path): - """ For debugging only: output contents of the metadata registry """ + """For debugging only: output contents of the metadata registry""" registry_config = repo_config.get_registry_config() project = repo_config.project - registry = Registry( - registry_path=registry_config.path, - repo_path=repo_path, - cache_ttl=timedelta(seconds=registry_config.cache_ttl_seconds), - ) + registry = Registry(registry_config=registry_config, repo_path=repo_path) + registry_dict = registry.to_dict(project=project) - for entity in registry.list_entities(project=project): - print(entity) - for feature_view in registry.list_feature_views(project=project): - print(feature_view) + click.echo(json.dumps(registry_dict, indent=2, sort_keys=True)) def cli_check_repo(repo_path: Path): @@ -328,6 +341,7 @@ def init_repo(repo_name: str, template: str): import importlib.util spec = importlib.util.spec_from_file_location("bootstrap", str(bootstrap_path)) + assert isinstance(spec, ModuleSpec) bootstrap = importlib.util.module_from_spec(spec) assert isinstance(spec.loader, Loader) spec.loader.exec_module(bootstrap) diff --git a/sdk/python/feast/request_feature_view.py b/sdk/python/feast/request_feature_view.py new file mode 100644 index 0000000000..7248ffe989 --- /dev/null +++ b/sdk/python/feast/request_feature_view.py @@ -0,0 +1,137 @@ +import copy +import warnings +from typing import Dict, List, Optional, Type + +from feast.base_feature_view import BaseFeatureView +from feast.data_source import RequestSource +from feast.feature_view_projection import FeatureViewProjection +from feast.field import Field +from feast.protos.feast.core.RequestFeatureView_pb2 import ( + RequestFeatureView as RequestFeatureViewProto, +) +from feast.protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec +from feast.usage import log_exceptions + + +class RequestFeatureView(BaseFeatureView): + """ + [Experimental] A RequestFeatureView defines a logical group of features that should + be available as an input to an on demand feature 
view at request time.
+
+    Attributes:
+        name: The unique name of the request feature view.
+        request_source: The request source that specifies the schema and
+            features of the request feature view.
+        features: The list of features defined as part of this request feature view.
+        description: A human-readable description.
+        tags: A dictionary of key-value pairs to store arbitrary metadata.
+        owner: The owner of the request feature view, typically the email of the primary
+            maintainer.
+    """
+
+    name: str
+    request_source: RequestSource
+    features: List[Field]
+    description: str
+    tags: Dict[str, str]
+    owner: str
+
+    @log_exceptions
+    def __init__(
+        self,
+        name: str,
+        request_data_source: RequestSource,
+        description: str = "",
+        tags: Optional[Dict[str, str]] = None,
+        owner: str = "",
+    ):
+        """
+        Creates a RequestFeatureView object.
+
+        Args:
+            name: The unique name of the request feature view.
+            request_data_source: The request data source that specifies the schema and
+                features of the request feature view.
+            description (optional): A human-readable description.
+            tags (optional): A dictionary of key-value pairs to store arbitrary metadata.
+            owner (optional): The owner of the request feature view, typically the email
+                of the primary maintainer.
+        """
+        warnings.warn(
+            "Request feature view is deprecated. "
+            "Please use request data source instead",
+            DeprecationWarning,
+        )
+
+        if isinstance(request_data_source.schema, Dict):
+            new_features = [
+                Field(name=name, dtype=dtype)
+                for name, dtype in request_data_source.schema.items()
+            ]
+        else:
+            new_features = request_data_source.schema
+
+        super().__init__(
+            name=name,
+            features=new_features,
+            description=description,
+            tags=tags,
+            owner=owner,
+        )
+        self.request_source = request_data_source
+
+    @property
+    def proto_class(self) -> Type[RequestFeatureViewProto]:
+        return RequestFeatureViewProto
+
+    def to_proto(self) -> RequestFeatureViewProto:
+        """
+        Converts a request feature view object to its protobuf representation.
+
+        Returns:
+            A RequestFeatureViewProto protobuf.
+        """
+        spec = RequestFeatureViewSpec(
+            name=self.name,
+            request_data_source=self.request_source.to_proto(),
+            description=self.description,
+            tags=self.tags,
+            owner=self.owner,
+        )
+
+        return RequestFeatureViewProto(spec=spec)
+
+    @classmethod
+    def from_proto(cls, request_feature_view_proto: RequestFeatureViewProto):
+        """
+        Creates a request feature view from a protobuf representation.
+
+        Args:
+            request_feature_view_proto: A protobuf representation of a request feature view.
+
+        Returns:
+            A RequestFeatureView object based on the request feature view protobuf.
+        """
+
+        request_feature_view_obj = cls(
+            name=request_feature_view_proto.spec.name,
+            request_data_source=RequestSource.from_proto(
+                request_feature_view_proto.spec.request_data_source
+            ),
+            description=request_feature_view_proto.spec.description,
+            tags=dict(request_feature_view_proto.spec.tags),
+            owner=request_feature_view_proto.spec.owner,
+        )
+
+        # FeatureViewProjections are not saved in the RequestFeatureView proto.
+        # Create the default projection.
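+        # A projection controls how the view's features are selected and
+        # renamed when the view is referenced elsewhere, e.g. from a
+        # FeatureService, so a fresh default is derived from the view itself.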
+ request_feature_view_obj.projection = FeatureViewProjection.from_definition( + request_feature_view_obj + ) + + return request_feature_view_obj + + def __copy__(self): + fv = RequestFeatureView(name=self.name, request_data_source=self.request_source) + fv.projection = copy.copy(self.projection) + return fv diff --git a/sdk/python/feast/saved_dataset.py b/sdk/python/feast/saved_dataset.py new file mode 100644 index 0000000000..aead7fe8ef --- /dev/null +++ b/sdk/python/feast/saved_dataset.py @@ -0,0 +1,223 @@ +from abc import abstractmethod +from datetime import datetime +from typing import TYPE_CHECKING, Dict, List, Optional, Type, cast + +import pandas as pd +import pyarrow +from google.protobuf.json_format import MessageToJson + +from feast.data_source import DataSource +from feast.dqm.profilers.profiler import Profile, Profiler +from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto +from feast.protos.feast.core.SavedDataset_pb2 import SavedDatasetMeta, SavedDatasetSpec +from feast.protos.feast.core.SavedDataset_pb2 import ( + SavedDatasetStorage as SavedDatasetStorageProto, +) + +if TYPE_CHECKING: + from feast.infra.offline_stores.offline_store import RetrievalJob + + +class _StorageRegistry(type): + classes_by_proto_attr_name: Dict[str, Type["SavedDatasetStorage"]] = {} + + def __new__(cls, name, bases, dct): + kls = type.__new__(cls, name, bases, dct) + if dct.get("_proto_attr_name"): + cls.classes_by_proto_attr_name[dct["_proto_attr_name"]] = kls + return kls + + +class SavedDatasetStorage(metaclass=_StorageRegistry): + _proto_attr_name: str + + @staticmethod + def from_proto(storage_proto: SavedDatasetStorageProto) -> "SavedDatasetStorage": + proto_attr_name = cast(str, storage_proto.WhichOneof("kind")) + return _StorageRegistry.classes_by_proto_attr_name[proto_attr_name].from_proto( + storage_proto + ) + + @abstractmethod + def to_proto(self) -> SavedDatasetStorageProto: + ... + + @abstractmethod + def to_data_source(self) -> DataSource: + ... + + +class SavedDataset: + name: str + features: List[str] + join_keys: List[str] + full_feature_names: bool + storage: SavedDatasetStorage + tags: Dict[str, str] + feature_service_name: Optional[str] = None + + created_timestamp: Optional[datetime] = None + last_updated_timestamp: Optional[datetime] = None + + min_event_timestamp: Optional[datetime] = None + max_event_timestamp: Optional[datetime] = None + + _retrieval_job: Optional["RetrievalJob"] = None + + def __init__( + self, + name: str, + features: List[str], + join_keys: List[str], + storage: SavedDatasetStorage, + full_feature_names: bool = False, + tags: Optional[Dict[str, str]] = None, + feature_service_name: Optional[str] = None, + ): + self.name = name + self.features = features + self.join_keys = join_keys + self.storage = storage + self.full_feature_names = full_feature_names + self.tags = tags or {} + self.feature_service_name = feature_service_name + + self._retrieval_job = None + + def __repr__(self): + items = (f"{k} = {v}" for k, v in self.__dict__.items()) + return f"<{self.__class__.__name__}({', '.join(items)})>" + + def __str__(self): + return str(MessageToJson(self.to_proto())) + + def __hash__(self): + return hash((self.name)) + + def __eq__(self, other): + if not isinstance(other, SavedDataset): + raise TypeError( + "Comparisons should only involve SavedDataset class objects." 
+ ) + + if ( + self.name != other.name + or sorted(self.features) != sorted(other.features) + or sorted(self.join_keys) != sorted(other.join_keys) + or self.storage != other.storage + or self.full_feature_names != other.full_feature_names + or self.tags != other.tags + or self.feature_service_name != other.feature_service_name + ): + return False + + return True + + @staticmethod + def from_proto(saved_dataset_proto: SavedDatasetProto): + """ + Converts a SavedDatasetProto to a SavedDataset object. + + Args: + saved_dataset_proto: A protobuf representation of a SavedDataset. + """ + ds = SavedDataset( + name=saved_dataset_proto.spec.name, + features=list(saved_dataset_proto.spec.features), + join_keys=list(saved_dataset_proto.spec.join_keys), + full_feature_names=saved_dataset_proto.spec.full_feature_names, + storage=SavedDatasetStorage.from_proto(saved_dataset_proto.spec.storage), + tags=dict(saved_dataset_proto.spec.tags.items()), + ) + + if saved_dataset_proto.spec.feature_service_name: + ds.feature_service_name = saved_dataset_proto.spec.feature_service_name + + if saved_dataset_proto.meta.HasField("created_timestamp"): + ds.created_timestamp = ( + saved_dataset_proto.meta.created_timestamp.ToDatetime() + ) + if saved_dataset_proto.meta.HasField("last_updated_timestamp"): + ds.last_updated_timestamp = ( + saved_dataset_proto.meta.last_updated_timestamp.ToDatetime() + ) + if saved_dataset_proto.meta.HasField("min_event_timestamp"): + ds.min_event_timestamp = ( + saved_dataset_proto.meta.min_event_timestamp.ToDatetime() + ) + if saved_dataset_proto.meta.HasField("max_event_timestamp"): + ds.max_event_timestamp = ( + saved_dataset_proto.meta.max_event_timestamp.ToDatetime() + ) + + return ds + + def to_proto(self) -> SavedDatasetProto: + """ + Converts a SavedDataset to its protobuf representation. + + Returns: + A SavedDatasetProto protobuf. + """ + meta = SavedDatasetMeta() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.min_event_timestamp: + meta.min_event_timestamp.FromDatetime(self.min_event_timestamp) + if self.max_event_timestamp: + meta.max_event_timestamp.FromDatetime(self.max_event_timestamp) + + spec = SavedDatasetSpec( + name=self.name, + features=self.features, + join_keys=self.join_keys, + full_feature_names=self.full_feature_names, + storage=self.storage.to_proto(), + tags=self.tags, + ) + if self.feature_service_name: + spec.feature_service_name = self.feature_service_name + + feature_service_proto = SavedDatasetProto(spec=spec, meta=meta) + return feature_service_proto + + def with_retrieval_job(self, retrieval_job: "RetrievalJob") -> "SavedDataset": + self._retrieval_job = retrieval_job + return self + + def to_df(self) -> pd.DataFrame: + if not self._retrieval_job: + raise RuntimeError( + "To load this dataset use FeatureStore.get_saved_dataset() " + "instead of instantiating it directly." + ) + + return self._retrieval_job.to_df() + + def to_arrow(self) -> pyarrow.Table: + if not self._retrieval_job: + raise RuntimeError( + "To load this dataset use FeatureStore.get_saved_dataset() " + "instead of instantiating it directly." 
+ ) + + return self._retrieval_job.to_arrow() + + def as_reference(self, profiler: "Profiler") -> "ValidationReference": + return ValidationReference(profiler=profiler, dataset=self) + + def get_profile(self, profiler: Profiler) -> Profile: + return profiler.analyze_dataset(self.to_df()) + + +class ValidationReference: + dataset: SavedDataset + profiler: Profiler + + def __init__(self, dataset: SavedDataset, profiler: Profiler): + self.dataset = dataset + self.profiler = profiler + + @property + def profile(self) -> Profile: + return self.profiler.analyze_dataset(self.dataset.to_df()) diff --git a/sdk/python/feast/staging/entities.py b/sdk/python/feast/staging/entities.py deleted file mode 100644 index 17f0959fce..0000000000 --- a/sdk/python/feast/staging/entities.py +++ /dev/null @@ -1,137 +0,0 @@ -import os -import tempfile -import uuid -from datetime import datetime, timedelta -from typing import List -from urllib.parse import urlparse - -import pandas as pd - -from feast import BigQuerySource, FileSource -from feast.config import Config -from feast.data_format import ParquetFormat -from feast.staging.storage_client import get_staging_client - -try: - from google.cloud import bigquery -except ImportError: - bigquery = None - - -def stage_entities_to_fs( - entity_source: pd.DataFrame, staging_location: str, config: Config -) -> FileSource: - """ - Dumps given (entities) dataframe as parquet file and stage it to remote file storage (subdirectory of staging_location) - - :return: FileSource with remote destination path - """ - entity_staging_uri = urlparse(os.path.join(staging_location, str(uuid.uuid4()))) - staging_client = get_staging_client(entity_staging_uri.scheme, config) - with tempfile.NamedTemporaryFile() as df_export_path: - # prevent casting ns -> ms exception inside pyarrow - entity_source["event_timestamp"] = entity_source["event_timestamp"].dt.floor( - "ms" - ) - - entity_source.to_parquet(df_export_path.name) - - with open(df_export_path.name, "rb") as f: - staging_client.upload_fileobj( - f, df_export_path.name, remote_uri=entity_staging_uri - ) - - # ToDo: support custom event_timestamp_column - return FileSource( - event_timestamp_column="event_timestamp", - file_format=ParquetFormat(), - file_url=entity_staging_uri.geturl(), - ) - - -def table_reference_from_string(table_ref: str): - """ - Parses reference string with format "{project}:{dataset}.{table}" into bigquery.TableReference - """ - project, dataset_and_table = table_ref.split(":") - dataset, table_id = dataset_and_table.split(".") - return bigquery.TableReference( - bigquery.DatasetReference(project, dataset), table_id - ) - - -def stage_entities_to_bq( - entity_source: pd.DataFrame, project: str, dataset: str -) -> BigQuerySource: - """ - Stores given (entity) dataframe as new table in BQ. Name of the table generated based on current time. - Table will expire in 1 day. - Returns BigQuerySource with reference to created table. 
- """ - bq_client = bigquery.Client() - destination = bigquery.TableReference( - bigquery.DatasetReference(project, dataset), - f"_entities_{datetime.now():%Y%m%d%H%M%s}", - ) - - # prevent casting ns -> ms exception inside pyarrow - entity_source["event_timestamp"] = entity_source["event_timestamp"].dt.floor("ms") - - load_job: bigquery.LoadJob = bq_client.load_table_from_dataframe( - entity_source, destination - ) - load_job.result() # wait until complete - - dest_table: bigquery.Table = bq_client.get_table(destination) - dest_table.expires = datetime.now() + timedelta(days=1) - bq_client.update_table(dest_table, fields=["expires"]) - - return BigQuerySource( - event_timestamp_column="event_timestamp", - table_ref=f"{destination.project}:{destination.dataset_id}.{destination.table_id}", - ) - - -JOIN_TEMPLATE = """SELECT - source.* -FROM - `{entities.project}.{entities.dataset_id}.{entities.table_id}` entities -JOIN - `{source.project}.{source.dataset_id}.{source.table_id}` source -ON - ({entity_key})""" - - -def create_bq_view_of_joined_features_and_entities( - source: BigQuerySource, entity_source: BigQuerySource, entity_names: List[str] -) -> BigQuerySource: - """ - Creates BQ view that joins tables from `source` and `entity_source` with join key derived from `entity_names`. - Returns BigQuerySource with reference to created view. - """ - bq_client = bigquery.Client() - - source_ref = table_reference_from_string(source.bigquery_options.table_ref) - entities_ref = table_reference_from_string(entity_source.bigquery_options.table_ref) - - destination_ref = bigquery.TableReference( - bigquery.DatasetReference(source_ref.project, source_ref.dataset_id), - f"_view_{source_ref.table_id}_{datetime.now():%Y%m%d%H%M%s}", - ) - - view = bigquery.Table(destination_ref) - view.view_query = JOIN_TEMPLATE.format( - entities=entities_ref, - source=source_ref, - entity_key=" AND ".join([f"source.{e} = entities.{e}" for e in entity_names]), - ) - view.expires = datetime.now() + timedelta(days=1) - bq_client.create_table(view) - - return BigQuerySource( - event_timestamp_column=source.event_timestamp_column, - created_timestamp_column=source.created_timestamp_column, - table_ref=f"{view.project}:{view.dataset_id}.{view.table_id}", - field_mapping=source.field_mapping, - date_partition_column=source.date_partition_column, - ) diff --git a/sdk/python/feast/staging/storage_client.py b/sdk/python/feast/staging/storage_client.py deleted file mode 100644 index 63d574460f..0000000000 --- a/sdk/python/feast/staging/storage_client.py +++ /dev/null @@ -1,513 +0,0 @@ -# -# Copyright 2020 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import hashlib -import os -import re -import shutil -from abc import ABC, ABCMeta, abstractmethod -from tempfile import TemporaryFile -from typing import List, Optional, Tuple -from typing.io import IO -from urllib.parse import ParseResult, urlparse - -from google.auth.exceptions import DefaultCredentialsError - -from feast.config import Config -from feast.constants import ConfigOptions as opt - -GS = "gs" -S3 = "s3" -S3A = "s3a" -AZURE_SCHEME = "wasbs" -LOCAL_FILE = "file" - - -def _hash_fileobj(fileobj: IO[bytes]) -> str: - """ Compute sha256 hash of a file. File pointer will be reset to 0 on return. """ - fileobj.seek(0) - h = hashlib.sha256() - for block in iter(lambda: fileobj.read(2 ** 20), b""): - h.update(block) - fileobj.seek(0) - return h.hexdigest() - - -def _gen_remote_uri( - fileobj: IO[bytes], - remote_uri: Optional[ParseResult], - remote_path_prefix: Optional[str], - remote_path_suffix: Optional[str], - sha256sum: Optional[str], -) -> ParseResult: - if remote_uri is None: - assert remote_path_prefix is not None and remote_path_suffix is not None - - if sha256sum is None: - sha256sum = _hash_fileobj(fileobj) - - return urlparse( - os.path.join(remote_path_prefix, f"{sha256sum}{remote_path_suffix}") - ) - else: - return remote_uri - - -class AbstractStagingClient(ABC): - """ - Client used to stage files in order to upload or download datasets into a historical store. - """ - - __metaclass__ = ABCMeta - - @abstractmethod - def __init__(self): - pass - - @abstractmethod - def download_file(self, uri: ParseResult) -> IO[bytes]: - """ - Downloads a file from an object store and returns a TemporaryFile object - """ - pass - - @abstractmethod - def list_files(self, uri: ParseResult) -> List[str]: - """ - Lists all the files under a directory in an object store. - """ - pass - - @abstractmethod - def upload_fileobj( - self, - fileobj: IO[bytes], - local_path: str, - *, - remote_uri: Optional[ParseResult] = None, - remote_path_prefix: Optional[str] = None, - remote_path_suffix: Optional[str] = None, - ) -> ParseResult: - """ - Uploads a file to an object store. You can either specify the destination object URI, - or destination suffix+prefix. In the latter case, this interface will work as a - content-addressable storage and the remote path will be computed using sha256 of the - uploaded content as `$remote_path_prefix/$sha256$remote_path_suffix` - - Args: - fileobj (IO[bytes]): file-like object containing the data to be uploaded. It needs to - supports seek() operation in addition to read/write. - local_path (str): a file name associated with fileobj. This param is only used for - diagnostic messages. If `fileobj` is a local file, pass its filename here. - remote_uri (ParseResult or None): destination object URI to upload to - remote_path_prefix (str or None): destination path prefix to upload to when using - content-addressable storage mode - remote_path_suffix (str or None): destination path suffix to upload to when using - content-addressable storage mode - - Returns: - ParseResult: the URI to the uploaded file. It would be the same as `remote_uri` if - `remote_uri` was passed in. Otherwise it will be the path computed from - `remote_path_prefix` and `remote_path_suffix`. 
- """ - pass - - -class GCSClient(AbstractStagingClient): - """ - Implementation of AbstractStagingClient for google cloud storage - """ - - def __init__(self): - try: - from google.cloud import storage - except ImportError: - raise ImportError( - "Install package google-cloud-storage==1.20.* for gcs staging support" - "run ```pip install google-cloud-storage==1.20.*```" - ) - try: - self.gcs_client = storage.Client(project=None) - except DefaultCredentialsError: - self.gcs_client = storage.Client.create_anonymous_client() - - def download_file(self, uri: ParseResult) -> IO[bytes]: - """ - Downloads a file from google cloud storage and returns a TemporaryFile object - - Args: - uri (urllib.parse.ParseResult): Parsed uri of the file ex: urlparse("gs://bucket/file.avro") - - Returns: - TemporaryFile object - """ - url = uri.geturl() - file_obj = TemporaryFile() - self.gcs_client.download_blob_to_file(url, file_obj) - file_obj.seek(0) - return file_obj - - def list_files(self, uri: ParseResult) -> List[str]: - """ - Lists all the files under a directory in google cloud storage if path has wildcard(*) character. - - Args: - uri (urllib.parse.ParseResult): Parsed uri of this location - - Returns: - List[str]: A list containing the full path to the file(s) in the - remote staging location. - """ - - bucket, path = self._uri_to_bucket_key(uri) - gs_bucket = self.gcs_client.get_bucket(bucket) - - if "*" in path: - regex = re.compile(path.replace("*", ".*?").strip("/")) - blob_list = gs_bucket.list_blobs( - prefix=path.strip("/").split("*")[0], delimiter="/" - ) - # File path should not be in path (file path must be longer than path) - return [ - f"{GS}://{bucket}/{file}" - for file in [x.name for x in blob_list] - if re.match(regex, file) and file not in path - ] - else: - return [f"{GS}://{bucket}/{path}"] - - def _uri_to_bucket_key(self, remote_path: ParseResult) -> Tuple[str, str]: - assert remote_path.hostname is not None - return remote_path.hostname, remote_path.path.lstrip("/") - - def upload_fileobj( - self, - fileobj: IO[bytes], - local_path: str, - *, - remote_uri: Optional[ParseResult] = None, - remote_path_prefix: Optional[str] = None, - remote_path_suffix: Optional[str] = None, - ) -> ParseResult: - remote_uri = _gen_remote_uri( - fileobj, remote_uri, remote_path_prefix, remote_path_suffix, None - ) - bucket, key = self._uri_to_bucket_key(remote_uri) - gs_bucket = self.gcs_client.get_bucket(bucket) - blob = gs_bucket.blob(key) - blob.upload_from_file(fileobj) - return remote_uri - - -class S3Client(AbstractStagingClient): - """ - Implementation of AbstractStagingClient for Aws S3 storage - """ - - def __init__(self, endpoint_url: str = None, url_scheme="s3"): - try: - import boto3 - except ImportError: - raise ImportError( - "Install package boto3 for s3 staging support" - "run ```pip install boto3```" - ) - self.s3_client = boto3.client("s3", endpoint_url=endpoint_url) - self.url_scheme = url_scheme - - def download_file(self, uri: ParseResult) -> IO[bytes]: - """ - Downloads a file from AWS s3 storage and returns a TemporaryFile object - - Args: - uri (urllib.parse.ParseResult): Parsed uri of the file ex: urlparse("s3://bucket/file.avro") - Returns: - TemporaryFile object - """ - bucket, url = self._uri_to_bucket_key(uri) - file_obj = TemporaryFile() - self.s3_client.download_fileobj(bucket, url, file_obj) - return file_obj - - def list_files(self, uri: ParseResult) -> List[str]: - """ - Lists all the files under a directory in s3 if path has wildcard(*) character. 
- - Args: - uri (urllib.parse.ParseResult): Parsed uri of this location - - Returns: - List[str]: A list containing the full path to the file(s) in the - remote staging location. - """ - - bucket, path = self._uri_to_bucket_key(uri) - if "*" in path: - regex = re.compile(path.replace("*", ".*?").strip("/")) - blob_list = self.s3_client.list_objects( - Bucket=bucket, Prefix=path.strip("/").split("*")[0], Delimiter="/" - ) - # File path should not be in path (file path must be longer than path) - return [ - f"{self.url_scheme}://{bucket}/{file}" - for file in [x["Key"] for x in blob_list["Contents"]] - if re.match(regex, file) and file not in path - ] - else: - return [f"{self.url_scheme}://{bucket}/{path}"] - - def _uri_to_bucket_key(self, remote_path: ParseResult) -> Tuple[str, str]: - assert remote_path.hostname is not None - return remote_path.hostname, remote_path.path.lstrip("/") - - def upload_fileobj( - self, - fileobj: IO[bytes], - local_path: str, - *, - remote_uri: Optional[ParseResult] = None, - remote_path_prefix: Optional[str] = None, - remote_path_suffix: Optional[str] = None, - ) -> ParseResult: - sha256sum = _hash_fileobj(fileobj) - remote_uri = _gen_remote_uri( - fileobj, remote_uri, remote_path_prefix, remote_path_suffix, sha256sum - ) - - import botocore - - bucket, key = self._uri_to_bucket_key(remote_uri) - - try: - head_response = self.s3_client.head_object(Bucket=bucket, Key=key) - if head_response["Metadata"]["sha256sum"] == sha256sum: - # File already exists - return remote_uri - else: - print(f"Uploading {local_path} to {remote_uri}") - self.s3_client.upload_fileobj( - fileobj, - bucket, - key, - ExtraArgs={"Metadata": {"sha256sum": sha256sum}}, - ) - return remote_uri - except botocore.exceptions.ClientError as e: - if e.response["Error"]["Code"] != "404": - raise - - self.s3_client.upload_fileobj( - fileobj, bucket, key, ExtraArgs={"Metadata": {"sha256sum": sha256sum}}, - ) - return remote_uri - - -class AzureBlobClient(AbstractStagingClient): - """ - Implementation of AbstractStagingClient for Azure Blob storage - """ - - def __init__(self, account_name: str, account_access_key: str): - try: - from azure.storage.blob import BlobServiceClient - except ImportError: - raise ImportError( - "Install package azure-storage-blob for azure blob staging support" - "run ```pip install azure-storage-blob```" - ) - self.account_name = account_name - account_url = f"https://{account_name}.blob.core.windows.net" - self.blob_service_client = BlobServiceClient( - account_url=account_url, credential=account_access_key - ) - - def download_file(self, uri: ParseResult) -> IO[bytes]: - """ - Downloads a file from Azure blob storage and returns a TemporaryFile object - - Args: - uri (urllib.parse.ParseResult): Parsed uri of the file ex: urlparse("wasbs://bucket@account_name.blob.core.windows.net/file.avro") - - Returns: - TemporaryFile object - """ - bucket, path = self._uri_to_bucket_key(uri) - container_client = self.blob_service_client.get_container_client(bucket) - return container_client.download_blob(path).readall() - - def list_files(self, uri: ParseResult) -> List[str]: - """ - Lists all the files under a directory in azure blob storage if path has wildcard(*) character. - - Args: - uri (urllib.parse.ParseResult): Parsed uri of this location - - Returns: - List[str]: A list containing the full path to the file(s) in the - remote staging location. 
- """ - - bucket, path = self._uri_to_bucket_key(uri) - if "*" in path: - regex = re.compile(path.replace("*", ".*?").strip("/")) - container_client = self.blob_service_client.get_container_client(bucket) - blob_list = container_client.list_blobs( - name_starts_with=path.strip("/").split("*")[0] - ) - # File path should not be in path (file path must be longer than path) - return [ - f"wasbs://{bucket}@{self.account_name}.blob.core.windows.net/{file}" - for file in [x.name for x in blob_list] - if re.match(regex, file) and file not in path - ] - else: - return [ - f"wasbs://{bucket}@{self.account_name}.blob.core.windows.net/{path}" - ] - - def _uri_to_bucket_key(self, uri: ParseResult) -> Tuple[str, str]: - assert uri.hostname == f"{self.account_name}.blob.core.windows.net" - assert uri.username - bucket = uri.username - key = uri.path.lstrip("/") - return bucket, key - - def upload_fileobj( - self, - fileobj: IO[bytes], - local_path: str, - *, - remote_uri: Optional[ParseResult] = None, - remote_path_prefix: Optional[str] = None, - remote_path_suffix: Optional[str] = None, - ) -> ParseResult: - remote_uri = _gen_remote_uri( - fileobj, remote_uri, remote_path_prefix, remote_path_suffix, None - ) - bucket, key = self._uri_to_bucket_key(remote_uri) - container_client = self.blob_service_client.get_container_client(bucket) - container_client.upload_blob(name=key, data=fileobj, overwrite=True) - return remote_uri - - -class LocalFSClient(AbstractStagingClient): - """ - Implementation of AbstractStagingClient for local file - Note: The is used for E2E tests. - """ - - def __init__(self): - pass - - def download_file(self, uri: ParseResult) -> IO[bytes]: - """ - Reads a local file from the disk - - Args: - uri (urllib.parse.ParseResult): Parsed uri of the file ex: urlparse("file:///folder/file.avro") - Returns: - TemporaryFile object - """ - url = uri.path - file_obj = open(url, "rb") - return file_obj - - def list_files(self, uri: ParseResult) -> List[str]: - raise NotImplementedError("list files not implemented for Local file") - - def _uri_to_path(self, uri: ParseResult) -> str: - return uri.path - - def upload_fileobj( - self, - fileobj: IO[bytes], - local_path: str, - *, - remote_uri: Optional[ParseResult] = None, - remote_path_prefix: Optional[str] = None, - remote_path_suffix: Optional[str] = None, - ) -> ParseResult: - - remote_uri = _gen_remote_uri( - fileobj, remote_uri, remote_path_prefix, remote_path_suffix, None - ) - remote_file_path = self._uri_to_path(remote_uri) - os.makedirs(os.path.dirname(remote_file_path), exist_ok=True) - with open(remote_file_path, "wb") as fdest: - shutil.copyfileobj(fileobj, fdest) - return remote_uri - - -def _s3_client(config: Config = None): - if config is None: - endpoint_url = None - else: - endpoint_url = config.get(opt.S3_ENDPOINT_URL, None) - return S3Client(endpoint_url=endpoint_url) - - -def _s3a_client(config: Config = None): - if config is None: - endpoint_url = None - else: - endpoint_url = config.get(opt.S3_ENDPOINT_URL, None) - return S3Client(endpoint_url=endpoint_url, url_scheme="s3a") - - -def _gcs_client(config: Config = None): - return GCSClient() - - -def _azure_blob_client(config: Config = None): - if config is None: - raise Exception("Azure blob client requires config") - account_name = config.get(opt.AZURE_BLOB_ACCOUNT_NAME, None) - account_access_key = config.get(opt.AZURE_BLOB_ACCOUNT_ACCESS_KEY, None) - if account_name is None or account_access_key is None: - raise Exception( - f"Azure blob client requires 
{opt.AZURE_BLOB_ACCOUNT_NAME} and {opt.AZURE_BLOB_ACCOUNT_ACCESS_KEY} set in config" - ) - return AzureBlobClient(account_name, account_access_key) - - -def _local_fs_client(config: Config = None): - return LocalFSClient() - - -storage_clients = { - GS: _gcs_client, - S3: _s3_client, - S3A: _s3a_client, - AZURE_SCHEME: _azure_blob_client, - LOCAL_FILE: _local_fs_client, -} - - -def get_staging_client(scheme, config: Config = None) -> AbstractStagingClient: - """ - Initialization of a specific client object(GCSClient, S3Client etc.) - - Args: - scheme (str): uri scheme: s3, gs or file - config (Config): additional configuration - - Returns: - An object of concrete implementation of AbstractStagingClient - """ - try: - return storage_clients[scheme](config) - except ValueError: - raise Exception( - f"Could not identify file scheme {scheme}. Only gs://, file://, s3:// and wasbs:// (for Azure) are supported" - ) diff --git a/sdk/python/feast/templates/aws/__init__.py b/sdk/python/feast/templates/aws/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/templates/aws/bootstrap.py b/sdk/python/feast/templates/aws/bootstrap.py index 4013ca5a8d..456c6e9b70 100644 --- a/sdk/python/feast/templates/aws/bootstrap.py +++ b/sdk/python/feast/templates/aws/bootstrap.py @@ -1,3 +1,8 @@ +import click + +from feast.infra.utils import aws_utils + + def bootstrap(): # Bootstrap() will automatically be called from the init_repo() during `feast init` @@ -6,21 +11,59 @@ def bootstrap(): from feast.driver_test_data import create_driver_hourly_stats_df - repo_path = pathlib.Path(__file__).parent.absolute() - data_path = repo_path / "data" - data_path.mkdir(exist_ok=True) - end_date = datetime.now().replace(microsecond=0, second=0, minute=0) start_date = end_date - timedelta(days=15) driver_entities = [1001, 1002, 1003, 1004, 1005] driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) - driver_stats_path = data_path / "driver_stats.parquet" - driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) + aws_region = click.prompt("AWS Region (e.g. 
us-west-2)") + cluster_id = click.prompt("Redshift Cluster ID") + database = click.prompt("Redshift Database Name") + user = click.prompt("Redshift User Name") + s3_staging_location = click.prompt("Redshift S3 Staging Location (s3://*)") + iam_role = click.prompt("Redshift IAM Role for S3 (arn:aws:iam::*:role/*)") + + if click.confirm( + "Should I upload example data to Redshift (overwriting 'feast_driver_hourly_stats' table)?", + default=True, + ): + client = aws_utils.get_redshift_data_client(aws_region) + s3 = aws_utils.get_s3_resource(aws_region) + + aws_utils.execute_redshift_statement( + client, + cluster_id, + database, + user, + "DROP TABLE IF EXISTS feast_driver_hourly_stats", + ) + + aws_utils.upload_df_to_redshift( + client, + cluster_id, + database, + user, + s3, + f"{s3_staging_location}/data/feast_driver_hourly_stats.parquet", + iam_role, + "feast_driver_hourly_stats", + driver_df, + ) + + repo_path = pathlib.Path(__file__).parent.absolute() + config_file = repo_path / "feature_store.yaml" + driver_file = repo_path / "driver_repo.py" - example_py_file = repo_path / "example.py" - replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path)) + replace_str_in_file(config_file, "%AWS_REGION%", aws_region) + replace_str_in_file(config_file, "%REDSHIFT_CLUSTER_ID%", cluster_id) + replace_str_in_file(config_file, "%REDSHIFT_DATABASE%", database) + replace_str_in_file(driver_file, "%REDSHIFT_DATABASE%", database) + replace_str_in_file(config_file, "%REDSHIFT_USER%", user) + replace_str_in_file( + config_file, "%REDSHIFT_S3_STAGING_LOCATION%", s3_staging_location + ) + replace_str_in_file(config_file, "%REDSHIFT_IAM_ROLE%", iam_role) def replace_str_in_file(file_path, match_str, sub_str): diff --git a/sdk/python/feast/templates/aws/driver_repo.py b/sdk/python/feast/templates/aws/driver_repo.py new file mode 100644 index 0000000000..5188f57cf8 --- /dev/null +++ b/sdk/python/feast/templates/aws/driver_repo.py @@ -0,0 +1,67 @@ +from datetime import timedelta + +from feast import Entity, FeatureView, Field, RedshiftSource, ValueType +from feast.types import Float32, Int64 + +# Define an entity for the driver. Entities can be thought of as primary keys used to +# retrieve features. Entities are also used to join multiple tables/views during the +# construction of feature vectors +driver = Entity( + # Name of the entity. Must be unique within a project + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. The join keys are also used to join feature + # tables/views when building feature vectors + join_keys=["driver_id"], + # The storage level type for an entity + value_type=ValueType.INT64, +) + +# Indicates a data source from which feature values can be retrieved. Sources are queried when building training +# datasets or materializing features into an online store. +driver_stats_source = RedshiftSource( + # The Redshift table where features can be found + table="feast_driver_hourly_stats", + # The event timestamp is used for point-in-time joins and for ensuring only + # features within the TTL are returned + timestamp_field="event_timestamp", + # The (optional) created timestamp is used to ensure there are no duplicate + # feature rows in the offline store or when building training datasets + created_timestamp_column="created", + # Database to redshift source. 
+ database="%REDSHIFT_DATABASE%", +) + +# Feature views are a grouping based on how features are stored in either the +# online or offline store. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + # The list of entities specifies the keys required for joining or looking + # up features from this feature view. The reference provided in this field + # correspond to the name of a defined entity (or entities) + entities=["driver"], + # The timedelta is the maximum age that each feature value may have + # relative to its lookup time. For historical features (used in training), + # TTL is relative to each timestamp provided in the entity dataframe. + # TTL also allows for eviction of keys from online stores and limits the + # amount of historical scanning required for historical feature values + # during retrieval + ttl=timedelta(weeks=52), + # The list of features defined below act as a schema to both define features + # for both materialization of features into a store, and are used as references + # during retrieval for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + # Batch sources are used to find feature values. In the case of this feature + # view we will query a source table on Redshift for driver statistics + # features + source=driver_stats_source, + # Tags are user defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) diff --git a/sdk/python/feast/templates/aws/feature_store.yaml b/sdk/python/feast/templates/aws/feature_store.yaml index 7f7be8527e..27d1c6879f 100644 --- a/sdk/python/feast/templates/aws/feature_store.yaml +++ b/sdk/python/feast/templates/aws/feature_store.yaml @@ -1,3 +1,14 @@ project: my_project registry: data/registry.db provider: aws +online_store: + type: dynamodb + region: %AWS_REGION% +offline_store: + type: redshift + cluster_id: %REDSHIFT_CLUSTER_ID% + region: %AWS_REGION% + database: %REDSHIFT_DATABASE% + user: %REDSHIFT_USER% + s3_staging_location: %REDSHIFT_S3_STAGING_LOCATION% + iam_role: %REDSHIFT_IAM_ROLE% diff --git a/sdk/python/feast/templates/aws/test.py b/sdk/python/feast/templates/aws/test.py index cc2cf7e984..07410954f7 100644 --- a/sdk/python/feast/templates/aws/test.py +++ b/sdk/python/feast/templates/aws/test.py @@ -1,7 +1,7 @@ -from datetime import datetime +from datetime import datetime, timedelta import pandas as pd -from example import driver, driver_hourly_stats_view +from driver_repo import driver, driver_stats_fv from feast import FeatureStore @@ -15,22 +15,49 @@ def main(): # Deploy the feature store to AWS print("Deploying feature store to AWS...") - fs.apply([driver, driver_hourly_stats_view]) + fs.apply([driver, driver_stats_fv]) # Select features - feature_refs = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] + features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] + # Create an entity dataframe. 
This is the dataframe that will be enriched with historical features + entity_df = pd.DataFrame( + { + "event_timestamp": [ + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") + for dt in pd.date_range( + start=datetime.now() - timedelta(days=3), + end=datetime.now(), + periods=3, + ) + ], + "driver_id": [1001, 1002, 1003], + } + ) + + print("Retrieving training data...") + + # Retrieve historical features by joining the entity dataframe to the Redshift table source + training_df = fs.get_historical_features( + features=features, entity_df=entity_df + ).to_df() + + print() + print(training_df) + + print() print("Loading features into the online store...") fs.materialize_incremental(end_date=datetime.now()) + print() print("Retrieving online features...") - # Retrieve features from the online store (DynamoDB) + # Retrieve features from the online store (Firestore) online_features = fs.get_online_features( - feature_refs=feature_refs, - entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() + print() print(pd.DataFrame.from_dict(online_features)) diff --git a/sdk/python/feast/templates/gcp/__init__.py b/sdk/python/feast/templates/gcp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/templates/gcp/driver_repo.py b/sdk/python/feast/templates/gcp/driver_repo.py index b36758046d..7d137f996b 100644 --- a/sdk/python/feast/templates/gcp/driver_repo.py +++ b/sdk/python/feast/templates/gcp/driver_repo.py @@ -1,17 +1,18 @@ from datetime import timedelta -from feast import BigQuerySource, Entity, Feature, FeatureView, ValueType +from feast import BigQuerySource, Entity, FeatureView, Field, ValueType +from feast.types import Float32, Int64 # Define an entity for the driver. Entities can be thought of as primary keys used to # retrieve features. Entities are also used to join multiple tables/views during the # construction of feature vectors driver = Entity( # Name of the entity. Must be unique within a project - name="driver_id", - # The join key of an entity describes the storage level field/column on which - # features can be looked up. The join key is also used to join feature + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. The join keys are also used to join feature # tables/views when building feature vectors - join_key="driver_id", + join_keys=["driver_id"], # The storage level type for an entity value_type=ValueType.INT64, ) @@ -20,10 +21,10 @@ # datasets or materializing features into an online store. driver_stats_source = BigQuerySource( # The BigQuery table where features can be found - table_ref="feast-oss.demo_data.driver_hourly_stats", + table="feast-oss.demo_data.driver_hourly_stats_2", # The event timestamp is used for point-in-time joins and for ensuring only # features within the TTL are returned - event_timestamp_column="datetime", + timestamp_field="event_timestamp", # The (optional) created timestamp is used to ensure there are no duplicate # feature rows in the offline store or when building training datasets created_timestamp_column="created", @@ -38,7 +39,7 @@ # The list of entities specifies the keys required for joining or looking # up features from this feature view. 
The reference provided in this field # correspond to the name of a defined entity (or entities) - entities=["driver_id"], + entities=["driver"], # The timedelta is the maximum age that each feature value may have # relative to its lookup time. For historical features (used in training), # TTL is relative to each timestamp provided in the entity dataframe. @@ -49,15 +50,15 @@ # The list of features defined below act as a schema to both define features # for both materialization of features into a store, and are used as references # during retrieval for building a training dataset or serving features - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT64), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), ], - # Inputs are used to find feature values. In the case of this feature + # Batch sources are used to find feature values. In the case of this feature # view we will query a source table on BigQuery for driver statistics # features - input=driver_stats_source, + source=driver_stats_source, # Tags are user defined key/value pairs that are attached to each # feature view tags={"team": "driver_performance"}, diff --git a/sdk/python/feast/templates/gcp/test.py b/sdk/python/feast/templates/gcp/test.py index aee8950077..538334044b 100644 --- a/sdk/python/feast/templates/gcp/test.py +++ b/sdk/python/feast/templates/gcp/test.py @@ -18,7 +18,7 @@ def main(): fs.apply([driver, driver_stats_fv]) # Select features - feature_refs = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] + features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] # Create an entity dataframe. This is the dataframe that will be enriched with historical features entity_df = pd.DataFrame( @@ -39,7 +39,7 @@ def main(): # Retrieve historical features by joining the entity dataframe to the BigQuery table source training_df = fs.get_historical_features( - feature_refs=feature_refs, entity_df=entity_df + features=features, entity_df=entity_df ).to_df() print() @@ -54,8 +54,7 @@ def main(): # Retrieve features from the online store (Firestore) online_features = fs.get_online_features( - feature_refs=feature_refs, - entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], ).to_dict() print() diff --git a/sdk/python/feast/templates/local/__init__.py b/sdk/python/feast/templates/local/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/templates/local/example.py b/sdk/python/feast/templates/local/example.py index f9f2b3b6eb..1d441e0e99 100644 --- a/sdk/python/feast/templates/local/example.py +++ b/sdk/python/feast/templates/local/example.py @@ -1,35 +1,36 @@ # This is an example feature definition file -from google.protobuf.duration_pb2 import Duration +from datetime import timedelta -from feast import Entity, Feature, FeatureView, FileSource, ValueType +from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast.types import Float32, Int64 # Read data from parquet files. Parquet is convenient for local development mode. For # production, you can use your favorite DWH, such as BigQuery. See Feast documentation # for more info. 
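The %PARQUET_PATH% placeholder in the FileSource just below is substituted by the local template's bootstrap step, which writes a generated driver-stats parquet file. A sketch of an equivalent file with made-up values; the column names follow the driver-stats schema these templates rely on (event_timestamp and created must match the source's timestamp fields):

```python
from datetime import datetime, timedelta, timezone

import pandas as pd

now = datetime.now(tz=timezone.utc).replace(microsecond=0)
df = pd.DataFrame(
    {
        "driver_id": [1001, 1002, 1003],
        "event_timestamp": [now - timedelta(hours=h) for h in (3, 2, 1)],
        "created": [now] * 3,
        "conv_rate": [0.1, 0.5, 0.9],       # illustrative values only
        "acc_rate": [0.9, 0.5, 0.1],
        "avg_daily_trips": [10, 20, 30],
    }
)
df.to_parquet("driver_stats.parquet")  # the path %PARQUET_PATH% resolves to
```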
driver_hourly_stats = FileSource( path="%PARQUET_PATH%", - event_timestamp_column="datetime", + timestamp_field="event_timestamp", created_timestamp_column="created", ) # Define an entity for the driver. You can think of entity as a primary key used to # fetch features. -driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) +driver = Entity(name="driver", join_keys=["driver_id"], value_type=ValueType.INT64,) # Our parquet files contain sample data that includes a driver_id column, timestamps and # three feature column. Here we define a Feature View that will allow us to serve this # data to our model online. driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", - entities=["driver_id"], - ttl=Duration(seconds=86400 * 1), - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT64), + entities=["driver"], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), ], online=True, - input=driver_hourly_stats, + source=driver_hourly_stats, tags={}, ) diff --git a/sdk/python/feast/templates/snowflake/bootstrap.py b/sdk/python/feast/templates/snowflake/bootstrap.py new file mode 100644 index 0000000000..194ba08c08 --- /dev/null +++ b/sdk/python/feast/templates/snowflake/bootstrap.py @@ -0,0 +1,93 @@ +import click +import snowflake.connector + +from feast.infra.utils.snowflake_utils import write_pandas + + +def bootstrap(): + # Bootstrap() will automatically be called from the init_repo() during `feast init` + + import pathlib + from datetime import datetime, timedelta + + from feast.driver_test_data import create_driver_hourly_stats_df + + repo_path = pathlib.Path(__file__).parent.absolute() + config_file = repo_path / "feature_store.yaml" + + project_name = str(repo_path)[str(repo_path).rfind("/") + 1 :] + + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + + repo_path = pathlib.Path(__file__).parent.absolute() + data_path = repo_path / "data" + data_path.mkdir(exist_ok=True) + driver_stats_path = data_path / "driver_stats.parquet" + driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) + + snowflake_deployment_url = click.prompt( + "Snowflake Deployment URL (exclude .snowflakecomputing.com):" + ) + snowflake_user = click.prompt("Snowflake User Name:") + snowflake_password = click.prompt("Snowflake Password:", hide_input=True) + snowflake_role = click.prompt("Snowflake Role Name (Case Sensitive):") + snowflake_warehouse = click.prompt("Snowflake Warehouse Name (Case Sensitive):") + snowflake_database = click.prompt("Snowflake Database Name (Case Sensitive):") + + if click.confirm( + f'Should I upload example data to Snowflake (overwriting "{project_name}_feast_driver_hourly_stats" table)?', + default=True, + ): + + conn = snowflake.connector.connect( + account=snowflake_deployment_url, + user=snowflake_user, + password=snowflake_password, + role=snowflake_role, + warehouse=snowflake_warehouse, + application="feast", + ) + + cur = conn.cursor() + cur.execute(f'CREATE DATABASE IF NOT EXISTS "{snowflake_database}"') + cur.execute(f'USE DATABASE "{snowflake_database}"') + cur.execute('CREATE SCHEMA IF NOT 
EXISTS "PUBLIC"') + cur.execute('USE SCHEMA "PUBLIC"') + cur.execute(f'DROP TABLE IF EXISTS "{project_name}_feast_driver_hourly_stats"') + write_pandas( + conn, + driver_df, + f"{project_name}_feast_driver_hourly_stats", + auto_create_table=True, + ) + conn.close() + + repo_path = pathlib.Path(__file__).parent.absolute() + config_file = repo_path / "feature_store.yaml" + driver_file = repo_path / "driver_repo.py" + replace_str_in_file( + config_file, "SNOWFLAKE_DEPLOYMENT_URL", snowflake_deployment_url + ) + replace_str_in_file(config_file, "SNOWFLAKE_USER", snowflake_user) + replace_str_in_file(config_file, "SNOWFLAKE_PASSWORD", snowflake_password) + replace_str_in_file(config_file, "SNOWFLAKE_ROLE", snowflake_role) + replace_str_in_file(config_file, "SNOWFLAKE_WAREHOUSE", snowflake_warehouse) + replace_str_in_file(config_file, "SNOWFLAKE_DATABASE", snowflake_database) + + replace_str_in_file(driver_file, "SNOWFLAKE_WAREHOUSE", snowflake_warehouse) + + +def replace_str_in_file(file_path, match_str, sub_str): + with open(file_path, "r") as f: + contents = f.read() + contents = contents.replace(match_str, sub_str) + with open(file_path, "wt") as f: + f.write(contents) + + +if __name__ == "__main__": + bootstrap() diff --git a/sdk/python/feast/templates/snowflake/driver_repo.py b/sdk/python/feast/templates/snowflake/driver_repo.py new file mode 100644 index 0000000000..ecccb9863b --- /dev/null +++ b/sdk/python/feast/templates/snowflake/driver_repo.py @@ -0,0 +1,66 @@ +from datetime import timedelta + +import yaml + +from feast import Entity, FeatureView, Field, SnowflakeSource +from feast.types import Float32, Int64 + +# Define an entity for the driver. Entities can be thought of as primary keys used to +# retrieve features. Entities are also used to join multiple tables/views during the +# construction of feature vectors +driver = Entity( + # Name of the entity. Must be unique within a project + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. The join keys are also used to join feature + # tables/views when building feature vectors + join_keys=["driver_id"], +) + +# Indicates a data source from which feature values can be retrieved. Sources are queried when building training +# datasets or materializing features into an online store. +project_name = yaml.safe_load(open("feature_store.yaml"))["project"] + +driver_stats_source = SnowflakeSource( + # The Snowflake table where features can be found + database=yaml.safe_load(open("feature_store.yaml"))["offline_store"]["database"], + table=f"{project_name}_feast_driver_hourly_stats", + warehouse="SNOWFLAKE_WAREHOUSE", + # The event timestamp is used for point-in-time joins and for ensuring only + # features within the TTL are returned + timestamp_field="event_timestamp", + # The (optional) created timestamp is used to ensure there are no duplicate + # feature rows in the offline store or when building training datasets + created_timestamp_column="created", +) + +# Feature views are a grouping based on how features are stored in either the +# online or offline store. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + # The list of entities specifies the keys required for joining or looking + # up features from this feature view. 
The reference provided in this field + # correspond to the name of a defined entity (or entities) + entities=["driver"], + # The timedelta is the maximum age that each feature value may have + # relative to its lookup time. For historical features (used in training), + # TTL is relative to each timestamp provided in the entity dataframe. + # TTL also allows for eviction of keys from online stores and limits the + # amount of historical scanning required for historical feature values + # during retrieval + ttl=timedelta(weeks=52), + # The list of features defined below act as a schema to both define features + # for both materialization of features into a store, and are used as references + # during retrieval for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + # Batch sources are used to find feature values. In the case of this feature + # view we will query a source table on Redshift for driver statistics + # features + batch_source=driver_stats_source, +) diff --git a/sdk/python/feast/templates/snowflake/feature_store.yaml b/sdk/python/feast/templates/snowflake/feature_store.yaml new file mode 100644 index 0000000000..9757ea2ead --- /dev/null +++ b/sdk/python/feast/templates/snowflake/feature_store.yaml @@ -0,0 +1,11 @@ +project: my_project +registry: registry.db +provider: local +offline_store: + type: snowflake.offline + account: SNOWFLAKE_DEPLOYMENT_URL + user: SNOWFLAKE_USER + password: SNOWFLAKE_PASSWORD + role: SNOWFLAKE_ROLE + warehouse: SNOWFLAKE_WAREHOUSE + database: SNOWFLAKE_DATABASE diff --git a/sdk/python/feast/templates/snowflake/test.py b/sdk/python/feast/templates/snowflake/test.py new file mode 100644 index 0000000000..32aa6380d5 --- /dev/null +++ b/sdk/python/feast/templates/snowflake/test.py @@ -0,0 +1,65 @@ +from datetime import datetime, timedelta + +import pandas as pd +from driver_repo import driver, driver_stats_fv + +from feast import FeatureStore + + +def main(): + pd.set_option("display.max_columns", None) + pd.set_option("display.width", 1000) + + # Load the feature store from the current path + fs = FeatureStore(repo_path=".") + + # Deploy the feature store to Snowflake + print("Deploying feature store to Snowflake...") + fs.apply([driver, driver_stats_fv]) + + # Select features + features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] + + # Create an entity dataframe. 
This is the dataframe that will be enriched with historical features + entity_df = pd.DataFrame( + { + "event_timestamp": [ + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") + for dt in pd.date_range( + start=datetime.now() - timedelta(days=3), + end=datetime.now(), + periods=3, + ) + ], + "driver_id": [1001, 1002, 1003], + } + ) + + print("Retrieving training data...") + + # Retrieve historical features by joining the entity dataframe to the Snowflake table source + training_df = fs.get_historical_features( + features=features, entity_df=entity_df + ).to_df() + + print() + print(training_df) + + print() + print("Loading features into the online store...") + fs.materialize_incremental(end_date=datetime.now()) + + print() + print("Retrieving online features...") + + # Retrieve features from the online store + online_features = fs.get_online_features( + features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + ).to_dict() + + print() + print(pd.DataFrame.from_dict(online_features)) + + +if __name__ == "__main__": + main() diff --git a/sdk/python/feast/templates/spark/__init__.py b/sdk/python/feast/templates/spark/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/templates/spark/bootstrap.py b/sdk/python/feast/templates/spark/bootstrap.py new file mode 100644 index 0000000000..b57387d3d7 --- /dev/null +++ b/sdk/python/feast/templates/spark/bootstrap.py @@ -0,0 +1,37 @@ +def bootstrap(): + # Bootstrap() will automatically be called from the init_repo() during `feast init` + import pathlib + from datetime import datetime, timedelta + + from feast.driver_test_data import ( + create_customer_daily_profile_df, + create_driver_hourly_stats_df, + ) + + repo_path = pathlib.Path(__file__).parent.absolute() + data_path = repo_path / "data" + data_path.mkdir(exist_ok=True) + + driver_entities = [1001, 1002, 1003] + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + driver_stats_df = create_driver_hourly_stats_df( + driver_entities, start_date, end_date + ) + driver_stats_df.to_parquet( + path=str(data_path / "driver_hourly_stats.parquet"), + allow_truncated_timestamps=True, + ) + + customer_entities = [201, 202, 203] + customer_profile_df = create_customer_daily_profile_df( + customer_entities, start_date, end_date + ) + customer_profile_df.to_parquet( + path=str(data_path / "customer_daily_profile.parquet"), + allow_truncated_timestamps=True, + ) + + +if __name__ == "__main__": + bootstrap() diff --git a/sdk/python/feast/templates/spark/example.py b/sdk/python/feast/templates/spark/example.py new file mode 100644 index 0000000000..58f3df740f --- /dev/null +++ b/sdk/python/feast/templates/spark/example.py @@ -0,0 +1,66 @@ +# # # # # # # # # # # # # # # # # # # # # # # # +# This is an example feature definition file # +# # # # # # # # # # # # # # # # # # # # # # # # + +from datetime import timedelta +from pathlib import Path + +from feast import Entity, FeatureView, Field, ValueType +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SparkSource, +) +from feast.types import Float32, Int64 + +# Constants related to the generated data sets +CURRENT_DIR = Path(__file__).parent + + +# Entity definitions +driver = Entity(name="driver", value_type=ValueType.INT64, description="driver id",) +customer = Entity( + name="customer", value_type=ValueType.INT64, description="customer id", +) + +# Sources +driver_hourly_stats = SparkSource( + 
name="driver_hourly_stats", + path=f"{CURRENT_DIR}/data/driver_hourly_stats.parquet", + file_format="parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) +customer_daily_profile = SparkSource( + name="customer_daily_profile", + path=f"{CURRENT_DIR}/data/customer_daily_profile.parquet", + file_format="parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Feature Views +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=["driver"], + ttl=timedelta(days=7), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_hourly_stats, + tags={}, +) +customer_daily_profile_view = FeatureView( + name="customer_daily_profile", + entities=["customer"], + ttl=timedelta(days=7), + schema=[ + Field(name="current_balance", dtype=Float32), + Field(name="avg_passenger_count", dtype=Float32), + Field(name="lifetime_trip_count", dtype=Int64), + ], + online=True, + source=customer_daily_profile, + tags={}, +) diff --git a/sdk/python/feast/templates/spark/feature_store.yaml b/sdk/python/feast/templates/spark/feature_store.yaml new file mode 100644 index 0000000000..2ea0ddfcc9 --- /dev/null +++ b/sdk/python/feast/templates/spark/feature_store.yaml @@ -0,0 +1,14 @@ +project: my_project +registry: data/registry.db +provider: local +offline_store: + type: spark + spark_conf: + spark.master: "local[*]" + spark.ui.enabled: "false" + spark.eventLog.enabled: "false" + spark.sql.catalogImplementation: "hive" + spark.sql.parser.quotedRegexColumnNames: "true" + spark.sql.session.timeZone: "UTC" +online_store: + path: data/online_store.db diff --git a/sdk/python/feast/transformation_server.py b/sdk/python/feast/transformation_server.py new file mode 100644 index 0000000000..83f4af749e --- /dev/null +++ b/sdk/python/feast/transformation_server.py @@ -0,0 +1,74 @@ +import logging +import sys +from concurrent import futures + +import grpc +import pyarrow as pa +from grpc_reflection.v1alpha import reflection + +from feast.errors import OnDemandFeatureViewNotFoundException +from feast.feature_store import FeatureStore +from feast.protos.feast.serving.TransformationService_pb2 import ( + DESCRIPTOR, + TRANSFORMATION_SERVICE_TYPE_PYTHON, + GetTransformationServiceInfoResponse, + TransformFeaturesResponse, + ValueType, +) +from feast.protos.feast.serving.TransformationService_pb2_grpc import ( + TransformationServiceServicer, + add_TransformationServiceServicer_to_server, +) +from feast.version import get_version + +log = logging.getLogger(__name__) + + +class TransformationServer(TransformationServiceServicer): + def __init__(self, fs: FeatureStore) -> None: + super().__init__() + self.fs = fs + + def GetTransformationServiceInfo(self, request, context): + response = GetTransformationServiceInfoResponse( + type=TRANSFORMATION_SERVICE_TYPE_PYTHON, + transformation_service_type_details=f"Python: {sys.version}, Feast: {get_version()}", + ) + return response + + def TransformFeatures(self, request, context): + try: + odfv = self.fs.get_on_demand_feature_view( + request.on_demand_feature_view_name + ) + except OnDemandFeatureViewNotFoundException: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + raise + + df = pa.ipc.open_file(request.transformation_input.arrow_value).read_pandas() + + result_df = odfv.get_transformed_features_df(df, True) + result_arrow = pa.Table.from_pandas(result_df) + sink = 
pa.BufferOutputStream() + writer = pa.ipc.new_file(sink, result_arrow.schema) + writer.write_table(result_arrow) + writer.close() + + buf = sink.getvalue().to_pybytes() + + return TransformFeaturesResponse( + transformation_output=ValueType(arrow_value=buf) + ) + + +def start_server(store: FeatureStore, port: int): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + add_TransformationServiceServicer_to_server(TransformationServer(store), server) + service_names_available_for_reflection = ( + DESCRIPTOR.services_by_name["TransformationService"].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(service_names_available_for_reflection, server) + server.add_insecure_port(f"[::]:{port}") + server.start() + server.wait_for_termination() diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index 53ab1183c2..a94d8aa59b 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -12,12 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. -import re -from typing import Any, Dict, Union +from collections import defaultdict +from datetime import datetime, timezone +from typing import ( + Any, + Dict, + Iterator, + List, + Optional, + Sequence, + Set, + Sized, + Tuple, + Type, + Union, + cast, +) import numpy as np import pandas as pd -from google.protobuf.json_format import MessageToDict +import pyarrow +from google.protobuf.timestamp_pb2 import Timestamp from feast.protos.feast.types.Value_pb2 import ( BoolList, @@ -29,7 +44,7 @@ StringList, ) from feast.protos.feast.types.Value_pb2 import Value as ProtoValue -from feast.value_type import ValueType +from feast.value_type import ListType, ValueType def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any: @@ -43,35 +58,47 @@ def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any: Returns: Python native type representation/version of the given field_value_proto """ - field_value_dict = MessageToDict(field_value_proto) - - for k, v in field_value_dict.items(): - if k == "int64Val": - return int(v) - if k == "bytesVal": - return bytes(v) - if (k == "int64ListVal") or (k == "int32ListVal"): - return [int(item) for item in v["val"]] - if (k == "floatListVal") or (k == "doubleListVal"): - return [float(item) for item in v["val"]] - if k == "stringListVal": - return [str(item) for item in v["val"]] - if k == "bytesListVal": - return [bytes(item) for item in v["val"]] - if k == "boolListVal": - return [bool(item) for item in v["val"]] - - if k in ["int32Val", "floatVal", "doubleVal", "stringVal", "boolVal"]: - return v - else: - raise TypeError( - f"Casting to Python native type for type {k} failed. " - f"Type {k} not found" - ) + val_attr = field_value_proto.WhichOneof("val") + if val_attr is None: + return None + val = getattr(field_value_proto, val_attr) + + # If it's a _LIST type extract the list. 
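The rewritten feast_value_type_to_python_type above no longer round-trips through MessageToDict: protobuf's WhichOneof reports which field of the Value message is set, getattr fetches it, and the hasattr(val, "val") check that follows unwraps list messages into plain Python lists. A quick illustration against Feast's own Value proto (assuming the feast package from this revision is importable):

```python
from feast.protos.feast.types.Value_pb2 import Int64List
from feast.protos.feast.types.Value_pb2 import Value as ProtoValue

v = ProtoValue(int64_list_val=Int64List(val=[1, 2, 3]))
which = v.WhichOneof("val")   # 'int64_list_val'
payload = getattr(v, which)
if hasattr(payload, "val"):   # list wrappers expose their items as .val
    payload = list(payload.val)
print(which, payload)         # int64_list_val [1, 2, 3]

print(ProtoValue().WhichOneof("val"))  # None -- an unset Value decodes to None
```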
+ if hasattr(val, "val"): + val = list(val.val) + + # Convert UNIX_TIMESTAMP values to `datetime` + if val_attr == "unix_timestamp_list_val": + val = [datetime.fromtimestamp(v, tz=timezone.utc) for v in val] + elif val_attr == "unix_timestamp_val": + val = datetime.fromtimestamp(val, tz=timezone.utc) + + return val + + +def feast_value_type_to_pandas_type(value_type: ValueType) -> Any: + value_type_to_pandas_type: Dict[ValueType, str] = { + ValueType.FLOAT: "float", + ValueType.INT32: "int", + ValueType.INT64: "int", + ValueType.STRING: "str", + ValueType.DOUBLE: "float", + ValueType.BYTES: "bytes", + ValueType.BOOL: "bool", + ValueType.UNIX_TIMESTAMP: "datetime64[ns]", + } + if value_type.name.endswith("_LIST"): + return "object" + if value_type in value_type_to_pandas_type: + return value_type_to_pandas_type[value_type] + raise TypeError( + f"Casting to pandas type for type {value_type} failed. " + f"Type {value_type} not found" + ) def python_type_to_feast_value_type( - name: str, value, recurse: bool = True + name: str, value: Any = None, recurse: bool = True, type_name: Optional[str] = None ) -> ValueType: """ Finds the equivalent Feast Value Type for a Python value. Both native @@ -86,12 +113,12 @@ def python_type_to_feast_value_type( Returns: Feast Value Type """ - - type_name = type(value).__name__ + type_name = (type_name or type(value).__name__).lower() type_map = { "int": ValueType.INT64, "str": ValueType.STRING, + "string": ValueType.STRING, # pandas.StringDtype "float": ValueType.DOUBLE, "bytes": ValueType.BYTES, "float64": ValueType.DOUBLE, @@ -100,10 +127,14 @@ def python_type_to_feast_value_type( "uint64": ValueType.INT64, "int32": ValueType.INT32, "uint32": ValueType.INT32, + "int16": ValueType.INT32, + "uint16": ValueType.INT32, "uint8": ValueType.INT32, "int8": ValueType.INT32, "bool": ValueType.BOOL, "timedelta": ValueType.UNIX_TIMESTAMP, + "timestamp": ValueType.UNIX_TIMESTAMP, + "datetime": ValueType.UNIX_TIMESTAMP, "datetime64[ns]": ValueType.UNIX_TIMESTAMP, "datetime64[ns, tz]": ValueType.UNIX_TIMESTAMP, "category": ValueType.STRING, @@ -112,203 +143,271 @@ def python_type_to_feast_value_type( if type_name in type_map: return type_map[type_name] - if type_name == "ndarray" or isinstance(value, list): - if recurse: - - # Convert to list type - list_items = pd.core.series.Series(value) - - # This is the final type which we infer from the list - common_item_value_type = None - for item in list_items: - if isinstance(item, ProtoValue): - current_item_value_type = _proto_str_to_value_type( - str(item.WhichOneof("val")) - ) - else: - # Get the type from the current item, only one level deep - current_item_value_type = python_type_to_feast_value_type( - name=name, value=item, recurse=False - ) - # Validate whether the type stays consistent - if ( - common_item_value_type - and not common_item_value_type == current_item_value_type - ): - raise ValueError( - f"List value type for field {name} is inconsistent. " - f"{common_item_value_type} different from " - f"{current_item_value_type}." - ) - common_item_value_type = current_item_value_type - if common_item_value_type is None: - raise ValueError( - f"field {name} cannot have null values for type inference." 
- ) - return ValueType[common_item_value_type.name + "_LIST"] - else: + if isinstance(value, np.ndarray) and str(value.dtype) in type_map: + item_type = type_map[str(value.dtype)] + return ValueType[item_type.name + "_LIST"] + + if isinstance(value, (list, np.ndarray)): + # if the value's type is "ndarray" and we couldn't infer from "value.dtype" + # this is most probably array of "object", + # so we need to iterate over objects and try to infer type of each item + if not recurse: raise ValueError( - f"Value type for field {name} is {value.dtype.__str__()} but " + f"Value type for field {name} is {type(value)} but " f"recursion is not allowed. Array types can only be one level " f"deep." ) - return type_map[value.dtype.__str__()] + # This is the final type which we infer from the list + common_item_value_type = None + for item in value: + if isinstance(item, ProtoValue): + current_item_value_type: ValueType = _proto_value_to_value_type(item) + else: + # Get the type from the current item, only one level deep + current_item_value_type = python_type_to_feast_value_type( + name=name, value=item, recurse=False + ) + # Validate whether the type stays consistent + if ( + common_item_value_type + and not common_item_value_type == current_item_value_type + ): + raise ValueError( + f"List value type for field {name} is inconsistent. " + f"{common_item_value_type} different from " + f"{current_item_value_type}." + ) + common_item_value_type = current_item_value_type + if common_item_value_type is None: + return ValueType.UNKNOWN + return ValueType[common_item_value_type.name + "_LIST"] + + raise ValueError( + f"Value with native type {type_name} " + f"cannot be converted into Feast value type" + ) + + +def python_values_to_feast_value_type( + name: str, values: Any, recurse: bool = True +) -> ValueType: + inferred_dtype = ValueType.UNKNOWN + for row in values: + current_dtype = python_type_to_feast_value_type( + name, value=row, recurse=recurse + ) + + if inferred_dtype is ValueType.UNKNOWN: + inferred_dtype = current_dtype + else: + if current_dtype != inferred_dtype and current_dtype not in ( + ValueType.UNKNOWN, + ValueType.NULL, + ): + raise TypeError( + f"Input entity {name} has mixed types, {current_dtype} and {inferred_dtype}. That is not allowed. " + ) + if inferred_dtype in (ValueType.UNKNOWN, ValueType.NULL): + raise ValueError( + f"field {name} cannot have all null values for type inference." 
+ ) + + return inferred_dtype def _type_err(item, dtype): - raise ValueError(f'Value "{item}" is of type {type(item)} not of type {dtype}') + raise TypeError(f'Value "{item}" is of type {type(item)} not of type {dtype}') + + +PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE: Dict[ + ValueType, Tuple[ListType, str, List[Type]] +] = { + ValueType.FLOAT_LIST: ( + FloatList, + "float_list_val", + [np.float32, np.float64, float], + ), + ValueType.DOUBLE_LIST: ( + DoubleList, + "double_list_val", + [np.float64, np.float32, float], + ), + ValueType.INT32_LIST: (Int32List, "int32_list_val", [np.int64, np.int32, int]), + ValueType.INT64_LIST: (Int64List, "int64_list_val", [np.int64, np.int32, int]), + ValueType.UNIX_TIMESTAMP_LIST: ( + Int64List, + "int64_list_val", + [np.datetime64, np.int64, np.int32, int, datetime, Timestamp], + ), + ValueType.STRING_LIST: (StringList, "string_list_val", [np.str_, str]), + ValueType.BOOL_LIST: (BoolList, "bool_list_val", [np.bool_, bool]), + ValueType.BYTES_LIST: (BytesList, "bytes_list_val", [np.bytes_, bytes]), +} + +PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE: Dict[ + ValueType, Tuple[str, Any, Optional[Set[Type]]] +] = { + ValueType.INT32: ("int32_val", lambda x: int(x), None), + ValueType.INT64: ( + "int64_val", + lambda x: int(x.timestamp()) + if isinstance(x, pd._libs.tslibs.timestamps.Timestamp) + else int(x), + None, + ), + ValueType.FLOAT: ("float_val", lambda x: float(x), None), + ValueType.DOUBLE: ("double_val", lambda x: x, {float, np.float64}), + ValueType.STRING: ("string_val", lambda x: str(x), None), + ValueType.BYTES: ("bytes_val", lambda x: x, {bytes}), + ValueType.BOOL: ("bool_val", lambda x: x, {bool, np.bool_}), +} + + +def _python_datetime_to_int_timestamp( + values: Sequence[Any], +) -> Sequence[Union[int, np.int_]]: + # Fast path for Numpy array. 
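_python_datetime_to_int_timestamp, which begins just below, normalizes heterogeneous datetime inputs to integer epoch seconds, with a vectorized fast path for numpy datetime64 arrays. The two core conversions it performs, shown standalone (note that .timestamp() on a naive datetime is interpreted in local time, which is why timezone-aware inputs are the unambiguous case):

```python
from datetime import datetime, timezone

import numpy as np

# Vectorized path: datetime64 arrays cast straight to epoch seconds.
arr = np.array(
    ["2022-01-01T00:00:00", "2022-01-01T00:00:01"], dtype="datetime64[ns]"
)
print(arr.astype("datetime64[s]").astype(np.int_))  # [1640995200 1640995201]

# Element-wise path: aware datetimes use .timestamp(), seconds since epoch.
dt = datetime(2022, 1, 1, tzinfo=timezone.utc)
print(int(dt.timestamp()))  # 1640995200
```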
+ if isinstance(values, np.ndarray) and isinstance(values.dtype, np.datetime64): + if values.ndim != 1: + raise ValueError("Only 1 dimensional arrays are supported.") + return cast(Sequence[np.int_], values.astype("datetime64[s]").astype(np.int_)) + + int_timestamps = [] + for value in values: + if isinstance(value, datetime): + int_timestamps.append(int(value.timestamp())) + elif isinstance(value, Timestamp): + int_timestamps.append(int(value.ToSeconds())) + elif isinstance(value, np.datetime64): + int_timestamps.append(value.astype("datetime64[s]").astype(np.int_)) + else: + int_timestamps.append(int(value)) + return int_timestamps -def _python_value_to_proto_value(feast_value_type, value) -> ProtoValue: +def _python_value_to_proto_value( + feast_value_type: ValueType, values: List[Any] +) -> List[ProtoValue]: """ Converts a Python (native, pandas) value to a Feast Proto Value based on a provided value type Args: feast_value_type: The target value type - value: Value that will be converted + values: List of Values that will be converted Returns: - Feast Value Proto + List of Feast Value Proto """ + # ToDo: make a better sample for type checks (more than one element) + sample = next(filter(_non_empty_value, values), None) # first not empty value # Detect list type and handle separately if "list" in feast_value_type.name.lower(): - - if feast_value_type == ValueType.FLOAT_LIST: - return ProtoValue( - float_list_val=FloatList( - val=[ - item - if type(item) in [np.float32, np.float64, float] - else _type_err(item, np.float32) - for item in value - ] + # Feature can be list but None is still valid + if feast_value_type in PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE: + proto_type, field_name, valid_types = PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE[ + feast_value_type + ] + + if sample is not None and not all( + type(item) in valid_types for item in sample + ): + first_invalid = next( + item for item in sample if type(item) not in valid_types ) - ) - - if feast_value_type == ValueType.DOUBLE_LIST: - return ProtoValue( - double_list_val=DoubleList( - val=[ - item - if type(item) in [np.float64, np.float32, float] - else _type_err(item, np.float64) - for item in value - ] - ) - ) - - if feast_value_type == ValueType.INT32_LIST: - return ProtoValue( - int32_list_val=Int32List( - val=[ - item if type(item) is np.int32 else _type_err(item, np.int32) - for item in value - ] - ) - ) - - if feast_value_type == ValueType.INT64_LIST: - return ProtoValue( - int64_list_val=Int64List( - val=[ - item - if type(item) in [np.int64, np.int32] - else _type_err(item, np.int64) - for item in value - ] - ) - ) + raise _type_err(first_invalid, valid_types[0]) - if feast_value_type == ValueType.UNIX_TIMESTAMP_LIST: - return ProtoValue( - int64_list_val=Int64List( - val=[ - item - if type(item) in [np.int64, np.int32] - else _type_err(item, np.int64) - for item in value - ] + if feast_value_type == ValueType.UNIX_TIMESTAMP_LIST: + int_timestamps_lists = ( + _python_datetime_to_int_timestamp(value) for value in values ) - ) + return [ + # ProtoValue does actually accept `np.int_` but the typing complains. + ProtoValue(unix_timestamp_list_val=Int64List(val=ts)) # type: ignore + for ts in int_timestamps_lists + ] + if feast_value_type == ValueType.BOOL_LIST: + # ProtoValue does not support conversion of np.bool_ so we need to convert it to support np.bool_. 
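The bool() cast the BOOL_LIST branch below applies exists because, as the diff's own comment notes, the protobuf containers will not accept numpy's np.bool_ scalars directly. Standalone illustration of the cast (assuming feast's protos are importable):

```python
import numpy as np

from feast.protos.feast.types.Value_pb2 import BoolList
from feast.protos.feast.types.Value_pb2 import Value as ProtoValue

flags = np.array([True, False])  # items are np.bool_, not built-in bool
proto = ProtoValue(bool_list_val=BoolList(val=[bool(v) for v in flags]))
print(list(proto.bool_list_val.val))  # [True, False]
```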
+ return [ + ProtoValue(**{field_name: proto_type(val=[bool(e) for e in value])}) # type: ignore + if value is not None + else ProtoValue() + for value in values + ] + return [ + ProtoValue(**{field_name: proto_type(val=value)}) # type: ignore + if value is not None + else ProtoValue() + for value in values + ] - if feast_value_type == ValueType.STRING_LIST: - return ProtoValue( - string_list_val=StringList( - val=[ - item - if type(item) in [np.str_, str] - else _type_err(item, np.str_) - for item in value - ] - ) - ) - - if feast_value_type == ValueType.BOOL_LIST: - return ProtoValue( - bool_list_val=BoolList( - val=[ - item - if type(item) in [np.bool_, bool] - else _type_err(item, np.bool_) - for item in value - ] + # Handle scalar types below + else: + if sample is None: + # all input values are None + return [ProtoValue()] * len(values) + + if feast_value_type == ValueType.UNIX_TIMESTAMP: + int_timestamps = _python_datetime_to_int_timestamp(values) + # ProtoValue does actually accept `np.int_` but the typing complains. + return [ProtoValue(unix_timestamp_val=ts) for ts in int_timestamps] # type: ignore + + ( + field_name, + func, + valid_scalar_types, + ) = PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE[feast_value_type] + if valid_scalar_types: + assert type(sample) in valid_scalar_types + if feast_value_type == ValueType.BOOL: + # ProtoValue does not support conversion of np.bool_ so we need to convert it to support np.bool_. + return [ + ProtoValue( + **{ + field_name: func( + bool(value) if type(value) is np.bool_ else value # type: ignore + ) + } ) + if not pd.isnull(value) + else ProtoValue() + for value in values + ] + if feast_value_type in PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE: + return [ + ProtoValue(**{field_name: func(value)}) + if not pd.isnull(value) + else ProtoValue() + for value in values + ] + + raise Exception(f"Unsupported data type: ${str(type(values[0]))}") + + +def python_values_to_proto_values( + values: List[Any], feature_type: ValueType = ValueType.UNKNOWN +) -> List[ProtoValue]: + value_type = feature_type + sample = next(filter(_non_empty_value, values), None) # first not empty value + if sample is not None and feature_type == ValueType.UNKNOWN: + if isinstance(sample, (list, np.ndarray)): + value_type = ( + feature_type + if len(sample) == 0 + else python_type_to_feast_value_type("", sample) ) + else: + value_type = python_type_to_feast_value_type("", sample) - if feast_value_type == ValueType.BYTES_LIST: - return ProtoValue( - bytes_list_val=BytesList( - val=[ - item - if type(item) in [np.bytes_, bytes] - else _type_err(item, np.bytes_) - for item in value - ] - ) - ) + if value_type == ValueType.UNKNOWN: + raise TypeError("Couldn't infer value type from empty value") - # Handle scalar types below - else: - if pd.isnull(value): - return ProtoValue() - elif feast_value_type == ValueType.INT32: - return ProtoValue(int32_val=int(value)) - elif feast_value_type == ValueType.INT64: - return ProtoValue(int64_val=int(value)) - elif feast_value_type == ValueType.UNIX_TIMESTAMP: - return ProtoValue(int64_val=int(value)) - elif feast_value_type == ValueType.FLOAT: - return ProtoValue(float_val=float(value)) - elif feast_value_type == ValueType.DOUBLE: - assert type(value) is float or np.float64 - return ProtoValue(double_val=value) - elif feast_value_type == ValueType.STRING: - return ProtoValue(string_val=str(value)) - elif feast_value_type == ValueType.BYTES: - assert type(value) is bytes - return ProtoValue(bytes_val=value) - elif feast_value_type == 
ValueType.BOOL: - assert type(value) is bool - return ProtoValue(bool_val=value) - - raise Exception(f"Unsupported data type: ${str(type(value))}") - - -def python_value_to_proto_value( - value: Any, feature_type: ValueType = None -) -> ProtoValue: - value_type = ( - python_type_to_feast_value_type("", value) - if value is not None - else feature_type - ) - return _python_value_to_proto_value(value_type, value) + return _python_value_to_proto_value(value_type, values) -def _proto_str_to_value_type(proto_str: str) -> ValueType: +def _proto_value_to_value_type(proto_value: ProtoValue) -> ValueType: """ Returns Feast ValueType given Feast ValueType string. @@ -318,6 +417,7 @@ Returns: A variant of ValueType. """ + proto_str = proto_value.WhichOneof("val") type_map = { "int32_val": ValueType.INT32, "int64_val": ValueType.INT64, @@ -333,38 +433,48 @@ "string_list_val": ValueType.STRING_LIST, "bytes_list_val": ValueType.BYTES_LIST, "bool_list_val": ValueType.BOOL_LIST, + None: ValueType.NULL, } return type_map[proto_str] def pa_to_feast_value_type(pa_type_as_str: str) -> ValueType: - if re.match(r"^timestamp", pa_type_as_str): - return ValueType.INT64 + is_list = False + if pa_type_as_str.startswith("list<item: "): + is_list = True + pa_type_as_str = pa_type_as_str.replace("list<item: ", "").replace(">", "") - type_map = { - "int32": ValueType.INT32, - "int64": ValueType.INT64, - "double": ValueType.DOUBLE, - "float": ValueType.FLOAT, - "string": ValueType.STRING, - "binary": ValueType.BYTES, - "bool": ValueType.BOOL, - "list<item: int32>": ValueType.INT32_LIST, - "list<item: int64>": ValueType.INT64_LIST, - "list<item: double>": ValueType.DOUBLE_LIST, - "list<item: float>": ValueType.FLOAT_LIST, - "list<item: string>": ValueType.STRING_LIST, - "list<item: binary>": ValueType.BYTES_LIST, - "list<item: bool>": ValueType.BOOL_LIST, - } - return type_map[pa_type_as_str] - - -def bq_to_feast_value_type(bq_type_as_str): - type_map: Dict[ValueType, Union[str, Dict[str, Any]]] = { - "DATETIME": ValueType.STRING, # Update to ValueType.UNIX_TIMESTAMP once #1520 lands. - "TIMESTAMP": ValueType.STRING, # Update to ValueType.UNIX_TIMESTAMP once #1520 lands.
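Taken together, the conversion layer is now batch-oriented: whole columns go to protos in one call, and None survives the round trip as an unset Value. A small check using the two public functions from this file, per the signatures shown in this diff (assuming this revision of feast is installed):

```python
from feast.type_map import (
    feast_value_type_to_python_type,
    python_values_to_proto_values,
)
from feast.value_type import ValueType

# None becomes an empty ProtoValue; scalars land in double_val.
protos = python_values_to_proto_values([1.5, None, 2.5], ValueType.DOUBLE)
print([feast_value_type_to_python_type(p) for p in protos])  # [1.5, None, 2.5]
```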
+ if pa_type_as_str.startswith("timestamp"): + value_type = ValueType.UNIX_TIMESTAMP + else: + type_map = { + "int32": ValueType.INT32, + "int64": ValueType.INT64, + "double": ValueType.DOUBLE, + "float": ValueType.FLOAT, + "string": ValueType.STRING, + "binary": ValueType.BYTES, + "bool": ValueType.BOOL, + "null": ValueType.NULL, + } + value_type = type_map[pa_type_as_str] + + if is_list: + value_type = ValueType[value_type.name + "_LIST"] + + return value_type + + +def bq_to_feast_value_type(bq_type_as_str: str) -> ValueType: + is_list = False + if bq_type_as_str.startswith("ARRAY<"): + is_list = True + bq_type_as_str = bq_type_as_str[6:-1] + + type_map: Dict[str, ValueType] = { + "DATETIME": ValueType.UNIX_TIMESTAMP, + "TIMESTAMP": ValueType.UNIX_TIMESTAMP, "INTEGER": ValueType.INT64, "INT64": ValueType.INT64, "STRING": ValueType.STRING, @@ -372,14 +482,15 @@ "FLOAT64": ValueType.DOUBLE, "BYTES": ValueType.BYTES, "BOOL": ValueType.BOOL, - "ARRAY<INT64>": ValueType.INT64_LIST, - "ARRAY<FLOAT64>": ValueType.DOUBLE_LIST, - "ARRAY<STRING>": ValueType.STRING_LIST, - "ARRAY<BYTES>": ValueType.BYTES_LIST, - "ARRAY<BOOL>": ValueType.BOOL_LIST, + "BOOLEAN": ValueType.BOOL, # legacy sql data type + "NULL": ValueType.NULL, } - return type_map[bq_type_as_str] + value_type = type_map[bq_type_as_str] + if is_list: + value_type = ValueType[value_type.name + "_LIST"] + + return value_type def redshift_to_feast_value_type(redshift_type_as_str: str) -> ValueType: @@ -402,10 +513,33 @@ return type_map[redshift_type_as_str.lower()] -def pa_to_redshift_value_type(pa_type_as_str: str) -> str: +def snowflake_python_type_to_feast_value_type( + snowflake_python_type_as_str: str, +) -> ValueType: + + type_map = { + "str": ValueType.STRING, + "float64": ValueType.DOUBLE, + "int64": ValueType.INT64, + "uint64": ValueType.INT64, + "int32": ValueType.INT32, + "uint32": ValueType.INT32, + "int16": ValueType.INT32, + "uint16": ValueType.INT32, + "uint8": ValueType.INT32, + "int8": ValueType.INT32, + "datetime64[ns]": ValueType.UNIX_TIMESTAMP, + "object": ValueType.STRING, + "bool": ValueType.BOOL, + } + + return type_map[snowflake_python_type_as_str.lower()] + + +def pa_to_redshift_value_type(pa_type: pyarrow.DataType) -> str: # PyArrow types: https://arrow.apache.org/docs/python/api/datatypes.html # Redshift type: https://docs.aws.amazon.com/redshift/latest/dg/c_Supported_data_types.html - pa_type_as_str = pa_type_as_str.lower() + pa_type_as_str = str(pa_type).lower() if pa_type_as_str.startswith("timestamp"): if "tz=" in pa_type_as_str: return "timestamptz" @@ -419,6 +553,9 @@ # PyArrow decimal types (e.g. "decimal(38,37)") luckily directly map to the Redshift type. return pa_type_as_str + if pa_type_as_str.startswith("list"): + return "super" + # We have to take into account how arrow types map to parquet types as well. # For example, null type maps to int32 in parquet, so we have to use int4 in Redshift. # Other mappings have also been adjusted accordingly. @@ -440,3 +577,65 @@ } return type_map[pa_type_as_str] + + +def _non_empty_value(value: Any) -> bool: + """ + Check that there's enough data we can use for type inference.
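For reference, these are the strings the two parsers above receive: str() of a pyarrow DataType spells lists as list<item: ...>, which the startswith/replace pair peels off, while BigQuery spells them ARRAY<...>, which the [6:-1] slice unwraps. Illustrative only:

```python
import pyarrow as pa

# str() of an Arrow type is exactly what pa_to_feast_value_type is given:
print(str(pa.list_(pa.int32())))          # list<item: int32>
print(str(pa.timestamp("us", tz="UTC")))  # timestamp[us, tz=UTC]

# The BigQuery branch strips the 6-char "ARRAY<" prefix and the ">" suffix:
bq = "ARRAY<INT64>"
print(bq[6:-1])  # INT64
```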
+
+
+def _non_empty_value(value: Any) -> bool:
+    """
+    Check that there's enough data we can use for type inference.
+    If it's a primitive type, just check that it's not None.
+    If it's an iterable, check that it has some elements (len > 0).
+    The empty string "" is a special case: it is considered non-empty.
+    """
+    return value is not None and (
+        not isinstance(value, Sized) or len(value) > 0 or isinstance(value, str)
+    )
+
+
+def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType:
+    # TODO not all spark types are convertible
+    # Current non-convertible types: interval, map, struct, structfield, decimal, binary
+    type_map: Dict[str, ValueType] = {
+        "null": ValueType.UNKNOWN,
+        "byte": ValueType.BYTES,
+        "string": ValueType.STRING,
+        "int": ValueType.INT32,
+        "short": ValueType.INT32,
+        "bigint": ValueType.INT64,
+        "long": ValueType.INT64,
+        "double": ValueType.DOUBLE,
+        "float": ValueType.FLOAT,
+        "boolean": ValueType.BOOL,
+        "timestamp": ValueType.UNIX_TIMESTAMP,
+        "array<byte>": ValueType.BYTES_LIST,
+        "array<string>": ValueType.STRING_LIST,
+        "array<int>": ValueType.INT32_LIST,
+        "array<bigint>": ValueType.INT64_LIST,
+        "array<double>": ValueType.DOUBLE_LIST,
+        "array<float>": ValueType.FLOAT_LIST,
+        "array<boolean>": ValueType.BOOL_LIST,
+        "array<timestamp>": ValueType.UNIX_TIMESTAMP_LIST,
+    }
+    # TODO: Find better way of doing this.
+    if type(spark_type_as_str) != str or spark_type_as_str not in type_map:
+        return ValueType.NULL
+    return type_map[spark_type_as_str.lower()]
+
+
+def spark_schema_to_np_dtypes(dtypes: List[Tuple[str, str]]) -> Iterator[np.dtype]:
+    # TODO recheck all typing (also tz for timestamp)
+    # https://spark.apache.org/docs/latest/api/python/user_guide/arrow_pandas.html#timestamp-with-time-zone-semantics
+
+    type_map = defaultdict(
+        lambda: np.dtype("O"),
+        {
+            "boolean": np.dtype("bool"),
+            "double": np.dtype("float64"),
+            "float": np.dtype("float64"),
+            "int": np.dtype("int64"),
+            "bigint": np.dtype("int64"),
+            "smallint": np.dtype("int64"),
+            "timestamp": np.dtype("datetime64[ns]"),
+        },
+    )
+
+    return (type_map[t] for _, t in dtypes)
diff --git a/sdk/python/feast/types.py b/sdk/python/feast/types.py
new file mode 100644
index 0000000000..40c1d62e7d
--- /dev/null
+++ b/sdk/python/feast/types.py
@@ -0,0 +1,194 @@
+# Copyright 2022 The Feast Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from abc import ABC, abstractmethod
+from enum import Enum
+from typing import Dict, Union
+
+from feast.value_type import ValueType
+
+PRIMITIVE_FEAST_TYPES_TO_VALUE_TYPES = {
+    "INVALID": "UNKNOWN",
+    "BYTES": "BYTES",
+    "STRING": "STRING",
+    "INT32": "INT32",
+    "INT64": "INT64",
+    "FLOAT64": "DOUBLE",
+    "FLOAT32": "FLOAT",
+    "BOOL": "BOOL",
+    "UNIX_TIMESTAMP": "UNIX_TIMESTAMP",
+}
+
+
+class ComplexFeastType(ABC):
+    """
+    A ComplexFeastType represents a structured type that is recognized by Feast.
+    """
+
+    def __init__(self):
+        """Creates a ComplexFeastType object."""
+        pass
+
+    @abstractmethod
+    def to_value_type(self) -> ValueType:
+        """
+        Converts a ComplexFeastType object to the corresponding ValueType enum.
+ """ + raise NotImplementedError + + def __hash__(self): + return hash(self.to_value_type().value) + + def __eq__(self, other): + return self.to_value_type() == other.to_value_type() + + +class PrimitiveFeastType(Enum): + """ + A PrimitiveFeastType represents a primitive type in Feast. + + Note that these values must match the values in /feast/protos/types/Value.proto. + """ + + INVALID = 0 + BYTES = 1 + STRING = 2 + INT32 = 3 + INT64 = 4 + FLOAT64 = 5 + FLOAT32 = 6 + BOOL = 7 + UNIX_TIMESTAMP = 8 + + def to_value_type(self) -> ValueType: + """ + Converts a PrimitiveFeastType object to the corresponding ValueType enum. + """ + value_type_name = PRIMITIVE_FEAST_TYPES_TO_VALUE_TYPES[self.name] + return ValueType[value_type_name] + + def __str__(self): + return PRIMITIVE_FEAST_TYPES_TO_STRING[self.name] + + def __eq__(self, other): + if isinstance(other, PrimitiveFeastType): + return self.value == other.value + else: + return False + + def __hash__(self): + return hash((PRIMITIVE_FEAST_TYPES_TO_STRING[self.name])) + + +Invalid = PrimitiveFeastType.INVALID +Bytes = PrimitiveFeastType.BYTES +String = PrimitiveFeastType.STRING +Bool = PrimitiveFeastType.BOOL +Int32 = PrimitiveFeastType.INT32 +Int64 = PrimitiveFeastType.INT64 +Float32 = PrimitiveFeastType.FLOAT32 +Float64 = PrimitiveFeastType.FLOAT64 +UnixTimestamp = PrimitiveFeastType.UNIX_TIMESTAMP + + +SUPPORTED_BASE_TYPES = [ + Invalid, + String, + Bytes, + Bool, + Int32, + Int64, + Float32, + Float64, + UnixTimestamp, +] + +PRIMITIVE_FEAST_TYPES_TO_STRING = { + "INVALID": "Invalid", + "STRING": "String", + "BYTES": "Bytes", + "BOOL": "Bool", + "INT32": "Int32", + "INT64": "Int64", + "FLOAT32": "Float32", + "FLOAT64": "Float64", + "UNIX_TIMESTAMP": "UnixTimestamp", +} + + +class Array(ComplexFeastType): + """ + An Array represents a list of types. + + Attributes: + base_type: The base type of the array. + """ + + base_type: Union[PrimitiveFeastType, ComplexFeastType] + + def __init__(self, base_type: Union[PrimitiveFeastType, ComplexFeastType]): + if base_type not in SUPPORTED_BASE_TYPES: + raise ValueError( + f"Type {type(base_type)} is currently not supported as a base type for Array." + ) + + self.base_type = base_type + + def to_value_type(self) -> ValueType: + assert isinstance(self.base_type, PrimitiveFeastType) + value_type_name = PRIMITIVE_FEAST_TYPES_TO_VALUE_TYPES[self.base_type.name] + value_type_list_name = value_type_name + "_LIST" + return ValueType[value_type_list_name] + + def __str__(self): + return f"Array({self.base_type})" + + +FeastType = Union[ComplexFeastType, PrimitiveFeastType] + + +VALUE_TYPES_TO_FEAST_TYPES: Dict["ValueType", FeastType] = { + ValueType.UNKNOWN: Invalid, + ValueType.BYTES: Bytes, + ValueType.STRING: String, + ValueType.INT32: Int32, + ValueType.INT64: Int64, + ValueType.DOUBLE: Float64, + ValueType.FLOAT: Float32, + ValueType.BOOL: Bool, + ValueType.UNIX_TIMESTAMP: UnixTimestamp, + ValueType.BYTES_LIST: Array(Bytes), + ValueType.STRING_LIST: Array(String), + ValueType.INT32_LIST: Array(Int32), + ValueType.INT64_LIST: Array(Int64), + ValueType.DOUBLE_LIST: Array(Float64), + ValueType.FLOAT_LIST: Array(Float32), + ValueType.BOOL_LIST: Array(Bool), + ValueType.UNIX_TIMESTAMP_LIST: Array(UnixTimestamp), +} + + +def from_value_type(value_type: ValueType,) -> FeastType: + """ + Converts a ValueType enum to a Feast type. + + Args: + value_type: The ValueType to be converted. + + Raises: + ValueError: The conversion could not be performed. 
+ """ + if value_type in VALUE_TYPES_TO_FEAST_TYPES: + return VALUE_TYPES_TO_FEAST_TYPES[value_type] + + raise ValueError(f"Could not convert value type {value_type} to FeastType.") diff --git a/sdk/python/feast/usage.py b/sdk/python/feast/usage.py index a089f660db..90b659479d 100644 --- a/sdk/python/feast/usage.py +++ b/sdk/python/feast/usage.py @@ -11,168 +11,368 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import concurrent.futures +import contextlib +import contextvars +import dataclasses +import hashlib import logging import os +import platform import sys +import typing import uuid from datetime import datetime from functools import wraps from os.path import expanduser, join from pathlib import Path -from typing import List, Optional, Tuple import requests +from feast.constants import DEFAULT_FEAST_USAGE_VALUE, FEAST_USAGE from feast.version import get_version -USAGE_ENDPOINT = "https://us-central1-kf-feast.cloudfunctions.net/bq_telemetry_logger" +USAGE_ENDPOINT = "https://usage.feast.dev" + _logger = logging.getLogger(__name__) +_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) +_is_enabled = os.getenv(FEAST_USAGE, default=DEFAULT_FEAST_USAGE_VALUE) == "True" -class Usage: - def __init__(self): - self._usage_enabled: bool = False - self.check_env_and_configure() - - def check_env_and_configure(self): - usage_enabled = ( - os.getenv("FEAST_USAGE", default="True") == "True" - ) # written this way to turn the env var string into a boolean - - # Check if it changed - if usage_enabled != self._usage_enabled: - self._usage_enabled = usage_enabled - - if self._usage_enabled: - try: - feast_home_dir = join(expanduser("~"), ".feast") - Path(feast_home_dir).mkdir(exist_ok=True) - usage_filepath = join(feast_home_dir, "usage") - - self._is_test = os.getenv("FEAST_IS_USAGE_TEST", "False") == "True" - self._usage_counter = {"get_online_features": 0} - - if os.path.exists(usage_filepath): - with open(usage_filepath, "r") as f: - self._usage_id = f.read() - else: - self._usage_id = str(uuid.uuid4()) - - with open(usage_filepath, "w") as f: - f.write(self._usage_id) - print( - "Feast is an open source project that collects anonymized error reporting and usage statistics. 
To opt out or learn" - " more see https://docs.feast.dev/reference/usage" - ) - except Exception as e: - _logger.debug(f"Unable to configure usage {e}") +_constant_attributes = { + "session_id": str(uuid.uuid4()), + "installation_id": None, + "version": get_version(), + "python_version": platform.python_version(), + "platform": platform.platform(), + "env_signature": hashlib.md5( + ",".join( + sorted([k for k in os.environ.keys() if not k.startswith("FEAST")]) + ).encode() + ).hexdigest(), +} + + +@dataclasses.dataclass +class FnCall: + fn_name: str + id: str + + start: datetime + end: typing.Optional[datetime] = None + + parent_id: typing.Optional[str] = None + + +class Sampler: + def should_record(self, event) -> bool: + raise NotImplementedError @property - def usage_id(self) -> Optional[str]: - if os.getenv("FEAST_FORCE_USAGE_UUID"): - return os.getenv("FEAST_FORCE_USAGE_UUID") - return self._usage_id - - def log(self, function_name: str): - self.check_env_and_configure() - if self._usage_enabled and self.usage_id: - if function_name == "get_online_features": - if self._usage_counter["get_online_features"] % 10000 != 0: - self._usage_counter["get_online_features"] += 1 - return - - json = { - "function_name": function_name, - "telemetry_id": self.usage_id, - "timestamp": datetime.utcnow().isoformat(), - "version": get_version(), - "os": sys.platform, - "is_test": self._is_test, - } - try: - requests.post(USAGE_ENDPOINT, json=json) - except Exception as e: - if self._is_test: - raise e - else: - pass - return - - def log_exception(self, error_type: str, traceback: List[Tuple[str, int, str]]): - self.check_env_and_configure() - if self._usage_enabled and self.usage_id: - json = { - "error_type": error_type, - "traceback": traceback, - "telemetry_id": self.usage_id, - "version": get_version(), - "os": sys.platform, - "is_test": self._is_test, - } - try: - requests.post(USAGE_ENDPOINT, json=json) - except Exception as e: - if self._is_test: - raise e - else: - pass - return - - -def log_exceptions(func): - @wraps(func) - def exception_logging_wrapper(*args, **kwargs): - try: - result = func(*args, **kwargs) - except Exception as e: - error_type = type(e).__name__ - trace_to_log = [] - tb = e.__traceback__ - while tb is not None: - trace_to_log.append( - ( - _trim_filename(tb.tb_frame.f_code.co_filename), - tb.tb_lineno, - tb.tb_frame.f_code.co_name, - ) + def priority(self): + return 0 + + +class AlwaysSampler(Sampler): + def should_record(self, event) -> bool: + return True + + +class RatioSampler(Sampler): + MAX_COUNTER = (1 << 32) - 1 + + def __init__(self, ratio): + assert 0 < ratio <= 1, "Ratio must be within (0, 1]" + self.ratio = ratio + self.total_counter = 0 + self.sampled_counter = 0 + + def should_record(self, event) -> bool: + self.total_counter += 1 + if self.total_counter == self.MAX_COUNTER: + self.total_counter = 1 + self.sampled_counter = 1 + + decision = self.sampled_counter < self.ratio * self.total_counter + self.sampled_counter += int(decision) + return decision + + @property + def priority(self): + return int(1 / self.ratio) + + +class UsageContext: + attributes: typing.Dict[str, typing.Any] + + call_stack: typing.List[FnCall] + completed_calls: typing.List[FnCall] + + exception: typing.Optional[Exception] = None + traceback: typing.Optional[typing.Tuple[str, int, str]] = None + + sampler: Sampler = AlwaysSampler() + + def __init__(self): + self.attributes = {} + self.call_stack = [] + self.completed_calls = [] + + +_context = contextvars.ContextVar("usage_context", 
default=UsageContext())
+
+
+def _set_installation_id():
+    if os.getenv("FEAST_FORCE_USAGE_UUID"):
+        _constant_attributes["installation_id"] = os.getenv("FEAST_FORCE_USAGE_UUID")
+        _constant_attributes["installation_ts"] = datetime.utcnow().isoformat()
+        return
+
+    feast_home_dir = join(expanduser("~"), ".feast")
+    installation_timestamp = datetime.utcnow()
+
+    try:
+        Path(feast_home_dir).mkdir(exist_ok=True)
+        usage_filepath = join(feast_home_dir, "usage")
+
+        if os.path.exists(usage_filepath):
+            installation_timestamp = datetime.utcfromtimestamp(
+                os.path.getmtime(usage_filepath)
+            )
+            with open(usage_filepath, "r") as f:
+                installation_id = f.read()
+        else:
+            installation_id = str(uuid.uuid4())
+
+            with open(usage_filepath, "w") as f:
+                f.write(installation_id)
+            print(
+                "Feast is an open source project that collects "
+                "anonymized error reporting and usage statistics. To opt out or learn"
+                " more see https://docs.feast.dev/reference/usage"
+            )
+    except OSError as e:
+        _logger.debug(f"Unable to configure usage {e}")
+        installation_id = "undefined"
+
+    _constant_attributes["installation_id"] = installation_id
+    _constant_attributes["installation_ts"] = installation_timestamp.isoformat()
+
+
+_set_installation_id()
+
+
+def _export(event: typing.Dict[str, typing.Any]):
+    _executor.submit(requests.post, USAGE_ENDPOINT, json=event, timeout=30)
+
+
+def _produce_event(ctx: UsageContext):
+    is_test = bool({"pytest", "unittest"} & sys.modules.keys())
+    event = {
+        "timestamp": datetime.utcnow().isoformat(),
+        "is_test": is_test,
+        "is_webserver": (
+            not is_test and bool({"uwsgi", "gunicorn", "fastapi"} & sys.modules.keys())
+        ),
+        "calls": [
+            dict(
+                fn_name=c.fn_name,
+                id=c.id,
+                parent_id=c.parent_id,
+                start=c.start and c.start.isoformat(),
+                end=c.end and c.end.isoformat(),
+            )
+            for c in reversed(ctx.completed_calls)
+        ],
+        "entrypoint": ctx.completed_calls[-1].fn_name,
+        "exception": repr(ctx.exception) if ctx.exception else None,
+        "traceback": ctx.traceback if ctx.exception else None,
+        **_constant_attributes,
+    }
+    event.update(ctx.attributes)
+
+    if ctx.sampler and not ctx.sampler.should_record(event):
+        return
+
+    _export(event)
+
+
+@contextlib.contextmanager
+def tracing_span(name):
+    """
+    Context manager for wrapping heavy parts of code in a tracing span.
+    """
+    if _is_enabled:
+        ctx = _context.get()
+        if not ctx.call_stack:
+            raise RuntimeError("tracing_span must be called in usage context")
+
+        last_call = ctx.call_stack[-1]
+        fn_call = FnCall(
+            id=uuid.uuid4().hex,
+            parent_id=last_call.id,
+            fn_name=f"{last_call.fn_name}.{name}",
+            start=datetime.utcnow(),
+        )
+    try:
+        yield
+    finally:
+        if _is_enabled:
+            fn_call.end = datetime.utcnow()
+            ctx.completed_calls.append(fn_call)
+
+
+def log_exceptions_and_usage(*args, **attrs):
+    """
+    This function decorator enables three components:
+    1. Error tracking
+    2. Usage statistic collection
+    3. Time profiling
+
+    This data is collected, anonymized, and sent to the Feast developers.
+    All events from nested decorated functions are grouped into a single event
+    to build a comprehensive context, useful for profiling and error tracking.
+
+    Usage example (will result in one output event):
+        @log_exceptions_and_usage
+        def fn(...):
+            nested()
+
+        @log_exceptions_and_usage(attr='value')
+        def nested(...):
+            deeply_nested()
+
+        @log_exceptions_and_usage(attr2='value2', sampler=RatioSampler(ratio=0.1))
+        def deeply_nested(...):
+            ...
+    """
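[Worked example — not part of the patch. The RatioSampler referenced in the docstring above admits roughly one call in every 1/ratio: each should_record() call increments total_counter and records only while sampled_counter < ratio * total_counter, so with ratio=0.1 the first call is recorded (0 < 0.1), calls two through ten are dropped (1 < 0.2 through 1 < 1.0 all fail), and call eleven is recorded again (1 < 1.1). Because priority is int(1 / ratio), the sampler that keeps the fewest events wins when the wrapper below merges samplers into the context.]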
+ """ + sampler = attrs.pop("sampler", AlwaysSampler()) + + def decorator(func): + if not _is_enabled: + return func + + @wraps(func) + def wrapper(*args, **kwargs): + ctx = _context.get() + ctx.call_stack.append( + FnCall( + id=uuid.uuid4().hex, + parent_id=ctx.call_stack[-1].id if ctx.call_stack else None, + fn_name=_fn_fullname(func), + start=datetime.utcnow(), ) - tb = tb.tb_next - usage.log_exception(error_type, trace_to_log) - raise - return result - - return exception_logging_wrapper - - -def log_exceptions_and_usage(func): - @wraps(func) - def exception_logging_wrapper(*args, **kwargs): - try: - result = func(*args, **kwargs) - usage.log(func.__name__) - except Exception as e: - error_type = type(e).__name__ - trace_to_log = [] - tb = e.__traceback__ - while tb is not None: - trace_to_log.append( - ( - _trim_filename(tb.tb_frame.f_code.co_filename), - tb.tb_lineno, - tb.tb_frame.f_code.co_name, - ) + ) + ctx.attributes.update(attrs) + + try: + return func(*args, **kwargs) + except Exception: + if ctx.exception: + # exception was already recorded + raise + + _, exc, traceback = sys.exc_info() + ctx.exception = exc + ctx.traceback = _trace_to_log(traceback) + + if traceback: + raise exc.with_traceback(traceback) + + raise exc + finally: + last_call = ctx.call_stack.pop(-1) + last_call.end = datetime.utcnow() + ctx.completed_calls.append(last_call) + ctx.sampler = ( + sampler if sampler.priority > ctx.sampler.priority else ctx.sampler ) - tb = tb.tb_next - usage.log_exception(error_type, trace_to_log) - raise - return result - return exception_logging_wrapper + if not ctx.call_stack: + # we reached the root of the stack + _context.set(UsageContext()) # reset context to default values + _produce_event(ctx) + + return wrapper + + if args: + return decorator(args[0]) + + return decorator + + +def log_exceptions(*args, **attrs): + """ + Function decorator that track errors and send them to Feast Developers + """ + + def decorator(func): + if not _is_enabled: + return func + + @wraps(func) + def wrapper(*args, **kwargs): + if _context.get().call_stack: + # we're already inside usage context + # let it handle exception + return func(*args, **kwargs) + + fn_call = FnCall( + id=uuid.uuid4().hex, fn_name=_fn_fullname(func), start=datetime.utcnow() + ) + try: + return func(*args, **kwargs) + except Exception: + _, exc, traceback = sys.exc_info() + + fn_call.end = datetime.utcnow() + + ctx = UsageContext() + ctx.exception = exc + ctx.traceback = _trace_to_log(traceback) + ctx.attributes = attrs + ctx.completed_calls.append(fn_call) + _produce_event(ctx) + + if traceback: + raise exc.with_traceback(traceback) + + raise exc + + return wrapper + + if args: + return decorator(args[0]) + + return decorator + + +def set_usage_attribute(name, value): + """ + Extend current context with custom attribute + """ + ctx = _context.get() + ctx.attributes[name] = value def _trim_filename(filename: str) -> str: return filename.split("/")[-1] -# Single global usage object -usage = Usage() +def _fn_fullname(fn: typing.Callable): + return fn.__module__ + "." 
+ fn.__qualname__ + + +def _trace_to_log(traceback): + log = [] + while traceback is not None: + log.append( + ( + _trim_filename(traceback.tb_frame.f_code.co_filename), + traceback.tb_lineno, + traceback.tb_frame.f_code.co_name, + ) + ) + traceback = traceback.tb_next + + return log diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index c57b126f0c..e521338680 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -4,8 +4,15 @@ def make_tzaware(t: datetime) -> datetime: - """ We assume tz-naive datetimes are UTC """ + """We assume tz-naive datetimes are UTC""" if t.tzinfo is None: return t.replace(tzinfo=utc) else: return t + + +def to_naive_utc(ts: datetime) -> datetime: + if ts.tzinfo is None: + return ts + else: + return ts.astimezone(utc).replace(tzinfo=None) diff --git a/sdk/python/feast/value_type.py b/sdk/python/feast/value_type.py index 15019a06f6..1904baf7bb 100644 --- a/sdk/python/feast/value_type.py +++ b/sdk/python/feast/value_type.py @@ -12,8 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. import enum +from typing import Type, Union -from tensorflow_metadata.proto.v0 import schema_pb2 +from feast.protos.feast.types.Value_pb2 import ( + BoolList, + BytesList, + DoubleList, + FloatList, + Int32List, + Int64List, + StringList, +) class ValueType(enum.Enum): @@ -38,29 +47,15 @@ class ValueType(enum.Enum): FLOAT_LIST = 16 BOOL_LIST = 17 UNIX_TIMESTAMP_LIST = 18 + NULL = 19 - def to_tfx_schema_feature_type(self): - if self.value in [ - ValueType.BYTES.value, - ValueType.STRING.value, - ValueType.BOOL.value, - ValueType.BYTES_LIST.value, - ValueType.STRING_LIST.value, - ValueType.INT32_LIST.value, - ValueType.INT64_LIST.value, - ValueType.DOUBLE_LIST.value, - ValueType.FLOAT_LIST.value, - ValueType.BOOL_LIST.value, - ValueType.UNIX_TIMESTAMP_LIST.value, - ]: - return schema_pb2.FeatureType.BYTES - elif self.value in [ - ValueType.INT32.value, - ValueType.INT64.value, - ValueType.UNIX_TIMESTAMP.value, - ]: - return schema_pb2.FeatureType.INT - elif self.value in [ValueType.DOUBLE.value, ValueType.FLOAT.value]: - return schema_pb2.FeatureType.FLOAT - else: - return schema_pb2.FeatureType.TYPE_UNKNOWN + +ListType = Union[ + Type[BoolList], + Type[BytesList], + Type[DoubleList], + Type[FloatList], + Type[Int32List], + Type[Int64List], + Type[StringList], +] diff --git a/sdk/python/feast/version.py b/sdk/python/feast/version.py index 78ec6e1a31..577a6f2d57 100644 --- a/sdk/python/feast/version.py +++ b/sdk/python/feast/version.py @@ -2,12 +2,14 @@ def get_version(): - """ - Returns version information of the Feast Python Package - """ - + """Returns version information of the Feast Python Package.""" try: sdk_version = pkg_resources.get_distribution("feast").version except pkg_resources.DistributionNotFound: sdk_version = "unknown" return sdk_version + + +"""Contains the version string of Twitter Feast.""" + +__version__ = "0.20.2+twtr0" diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt new file mode 100644 index 0000000000..99258bd96d --- /dev/null +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -0,0 +1,750 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --extra=ci --output-file=requirements/py3.10-ci-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +adal==1.2.7 + # via + # azure-datalake-store + # msrestazure +adlfs==0.5.9 + # via feast 
(setup.py) +aiohttp==3.8.1 + # via + # adlfs + # gcsfs +aiosignal==1.2.0 + # via aiohttp +alabaster==0.7.12 + # via sphinx +altair==4.2.0 + # via great-expectations +anyio==3.5.0 + # via + # starlette + # watchgod +appdirs==1.4.4 + # via black +appnope==0.1.3 + # via ipython +asgiref==3.5.0 + # via uvicorn +asn1crypto==1.5.1 + # via + # oscrypto + # snowflake-connector-python +assertpy==1.1 + # via feast (setup.py) +asttokens==2.0.5 + # via stack-data +async-timeout==4.0.2 + # via + # aiohttp + # redis +attrs==21.4.0 + # via + # aiohttp + # black + # jsonschema + # pytest +avro==1.10.0 + # via feast (setup.py) +azure-core==1.23.1 + # via + # adlfs + # azure-identity + # azure-storage-blob +azure-datalake-store==0.0.52 + # via adlfs +azure-identity==1.9.0 + # via adlfs +azure-storage-blob==12.11.0 + # via adlfs +babel==2.9.1 + # via sphinx +backcall==0.2.0 + # via ipython +black==19.10b0 + # via feast (setup.py) +boto3==1.21.41 + # via + # feast (setup.py) + # moto +botocore==1.24.41 + # via + # boto3 + # moto + # s3transfer +cachecontrol==0.12.10 + # via firebase-admin +cachetools==4.2.4 + # via google-auth +certifi==2021.10.8 + # via + # minio + # msrest + # requests + # snowflake-connector-python +cffi==1.15.0 + # via + # azure-datalake-store + # cryptography + # snowflake-connector-python +cfgv==3.3.1 + # via pre-commit +charset-normalizer==2.0.12 + # via + # aiohttp + # requests + # snowflake-connector-python +click==8.1.2 + # via + # black + # feast (setup.py) + # great-expectations + # pip-tools + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via + # feast (setup.py) + # great-expectations +coverage[toml]==6.3.2 + # via pytest-cov +cryptography==3.4.8 + # via + # adal + # azure-identity + # azure-storage-blob + # feast (setup.py) + # great-expectations + # moto + # msal + # pyopenssl + # snowflake-connector-python +dask==2022.1.1 + # via feast (setup.py) +dataclasses==0.6 + # via great-expectations +decorator==5.1.1 + # via + # gcsfs + # ipython +deprecated==1.2.13 + # via redis +deprecation==2.1.0 + # via testcontainers +dill==0.3.4 + # via feast (setup.py) +distlib==0.3.4 + # via virtualenv +docker==5.0.3 + # via + # feast (setup.py) + # testcontainers +docutils==0.17.1 + # via + # sphinx + # sphinx-rtd-theme +entrypoints==0.4 + # via altair +execnet==1.9.0 + # via pytest-xdist +executing==0.8.3 + # via stack-data +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fastjsonschema==2.15.3 + # via nbformat +filelock==3.6.0 + # via virtualenv +firebase-admin==4.5.2 + # via feast (setup.py) +flake8==4.0.1 + # via feast (setup.py) +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +fsspec==2022.3.0 + # via + # adlfs + # dask + # gcsfs +gcsfs==2022.3.0 + # via feast (setup.py) +google-api-core[grpc]==1.31.5 + # via + # feast (setup.py) + # firebase-admin + # google-api-python-client + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-core + # google-cloud-datastore + # google-cloud-firestore +google-api-python-client==2.44.0 + # via firebase-admin +google-auth==1.35.0 + # via + # gcsfs + # google-api-core + # google-api-python-client + # google-auth-httplib2 + # google-auth-oauthlib + # google-cloud-core + # google-cloud-storage +google-auth-httplib2==0.1.0 + # via google-api-python-client +google-auth-oauthlib==0.5.1 + # via gcsfs +google-cloud-bigquery==2.34.3 + # via feast (setup.py) +google-cloud-bigquery-storage==2.13.1 + # via feast (setup.py) +google-cloud-core==1.7.2 + # via + # feast 
(setup.py) + # google-cloud-bigquery + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-cloud-datastore==2.5.1 + # via feast (setup.py) +google-cloud-firestore==2.4.0 + # via firebase-admin +google-cloud-storage==1.40.0 + # via + # feast (setup.py) + # firebase-admin + # gcsfs +google-crc32c==1.3.0 + # via google-resumable-media +google-resumable-media==1.3.3 + # via + # google-cloud-bigquery + # google-cloud-storage +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +great-expectations==0.14.13 + # via feast (setup.py) +grpcio==1.44.0 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # grpcio-reflection + # grpcio-testing + # grpcio-tools +grpcio-reflection==1.44.0 + # via feast (setup.py) +grpcio-testing==1.44.0 + # via feast (setup.py) +grpcio-tools==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +happybase==1.2.0 + # via feast (setup.py) +hiredis==2.0.0 + # via feast (setup.py) +httplib2==0.20.4 + # via + # google-api-python-client + # google-auth-httplib2 +httptools==0.4.0 + # via uvicorn +identify==2.4.12 + # via pre-commit +idna==3.3 + # via + # anyio + # requests + # snowflake-connector-python + # yarl +imagesize==1.3.0 + # via sphinx +importlib-metadata==4.11.3 + # via great-expectations +iniconfig==1.1.1 + # via pytest +ipython==8.2.0 + # via great-expectations +isodate==0.6.1 + # via msrest +isort==5.10.1 + # via feast (setup.py) +jedi==0.18.1 + # via ipython +jinja2==3.0.3 + # via + # altair + # feast (setup.py) + # great-expectations + # moto + # sphinx +jmespath==1.0.0 + # via + # boto3 + # botocore +jsonpatch==1.32 + # via great-expectations +jsonpointer==2.3 + # via jsonpatch +jsonschema==4.4.0 + # via + # altair + # feast (setup.py) + # great-expectations + # nbformat +jupyter-core==4.9.2 + # via nbformat +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via + # jinja2 + # moto +matplotlib-inline==0.1.3 + # via ipython +mccabe==0.6.1 + # via flake8 +minio==7.1.0 + # via feast (setup.py) +mistune==2.0.2 + # via great-expectations +mmh3==3.0.0 + # via feast (setup.py) +mock==2.0.0 + # via feast (setup.py) +moto==3.1.4 + # via feast (setup.py) +msal==1.17.0 + # via + # azure-identity + # msal-extensions +msal-extensions==0.3.1 + # via azure-identity +msgpack==1.0.3 + # via cachecontrol +msrest==0.6.21 + # via + # azure-storage-blob + # msrestazure +msrestazure==0.6.4 + # via adlfs +multidict==6.0.2 + # via + # aiohttp + # yarl +mypy==0.931 + # via feast (setup.py) +mypy-extensions==0.4.3 + # via mypy +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.3.0 + # via great-expectations +nodeenv==1.6.0 + # via pre-commit +numpy==1.21.6 + # via + # altair + # feast (setup.py) + # great-expectations + # pandas + # pandavro + # pyarrow + # scipy +oauthlib==3.2.0 + # via requests-oauthlib +oscrypto==1.3.0 + # via snowflake-connector-python +packaging==21.3 + # via + # dask + # deprecation + # google-api-core + # google-cloud-bigquery + # great-expectations + # pytest + # redis + # sphinx +pandas==1.3.5 + # via + # altair + # feast (setup.py) + # great-expectations + # pandavro + # snowflake-connector-python +pandavro==1.5.2 + # via feast (setup.py) +parso==0.8.3 + # via jedi +partd==1.2.0 + # via dask +pathspec==0.9.0 + # via black +pbr==5.8.1 + # via mock +pep517==0.12.0 + # via pip-tools +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pip-tools==6.6.0 + # via feast (setup.py) +platformdirs==2.5.1 + # via virtualenv 
+pluggy==1.0.0 + # via pytest +ply==3.11 + # via thriftpy2 +portalocker==2.4.0 + # via msal-extensions +pre-commit==2.18.1 + # via feast (setup.py) +prompt-toolkit==3.0.29 + # via ipython +proto-plus==1.19.6 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-datastore + # google-cloud-firestore +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # googleapis-common-protos + # grpcio-reflection + # grpcio-testing + # grpcio-tools + # mypy-protobuf + # proto-plus + # tensorflow-metadata +psutil==5.9.0 + # via feast (setup.py) +ptyprocess==0.7.0 + # via pexpect +pure-eval==0.2.2 + # via stack-data +py==1.11.0 + # via + # pytest + # pytest-forked +py-cpuinfo==8.0.0 + # via pytest-benchmark +py4j==0.10.9.3 + # via pyspark +pyarrow==6.0.1 + # via + # feast (setup.py) + # snowflake-connector-python +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pybindgen==0.22.1 + # via feast (setup.py) +pycodestyle==2.8.0 + # via flake8 +pycparser==2.21 + # via cffi +pycryptodomex==3.14.1 + # via snowflake-connector-python +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyflakes==2.4.0 + # via flake8 +pygments==2.11.2 + # via + # ipython + # sphinx +pyjwt[crypto]==2.3.0 + # via + # adal + # msal + # snowflake-connector-python +pyopenssl==21.0.0 + # via snowflake-connector-python +pyparsing==2.4.7 + # via + # great-expectations + # httplib2 + # packaging +pyrsistent==0.18.1 + # via jsonschema +pyspark==3.2.1 + # via feast (setup.py) +pytest==7.1.1 + # via + # feast (setup.py) + # pytest-benchmark + # pytest-cov + # pytest-forked + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # pytest-timeout + # pytest-xdist +pytest-benchmark==3.4.1 + # via feast (setup.py) +pytest-cov==3.0.0 + # via feast (setup.py) +pytest-forked==1.4.0 + # via pytest-xdist +pytest-lazy-fixture==0.6.3 + # via feast (setup.py) +pytest-mock==1.10.4 + # via feast (setup.py) +pytest-ordering==0.6 + # via feast (setup.py) +pytest-timeout==1.4.2 + # via feast (setup.py) +pytest-xdist==2.5.0 + # via feast (setup.py) +python-dateutil==2.8.2 + # via + # adal + # botocore + # google-cloud-bigquery + # great-expectations + # moto + # pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via + # babel + # google-api-core + # great-expectations + # moto + # pandas + # snowflake-connector-python + # trino +pytz-deprecation-shim==0.1.0.post0 + # via tzlocal +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # pre-commit + # uvicorn +redis==4.2.2 + # via feast (setup.py) +regex==2022.3.15 + # via black +requests==2.27.1 + # via + # adal + # adlfs + # azure-core + # azure-datalake-store + # cachecontrol + # docker + # gcsfs + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # moto + # msal + # msrest + # requests-oauthlib + # responses + # snowflake-connector-python + # sphinx + # trino +requests-oauthlib==1.3.1 + # via + # google-auth-oauthlib + # msrest +responses==0.20.0 + # via moto +rsa==4.8 + # via google-auth +ruamel-yaml==0.17.17 + # via great-expectations +s3transfer==0.5.2 + # via boto3 +scipy==1.8.0 + # via great-expectations +six==1.16.0 + # via + # absl-py + # azure-core + # azure-identity + # google-api-core + # google-auth + # google-auth-httplib2 + # google-cloud-core + # google-resumable-media + # grpcio + # happybase + # mock + # msrestazure + # pandavro + # pyopenssl + # python-dateutil + # virtualenv +sniffio==1.2.0 + 
# via anyio +snowballstemmer==2.2.0 + # via sphinx +snowflake-connector-python[pandas]==2.7.6 + # via feast (setup.py) +sphinx==4.3.2 + # via + # feast (setup.py) + # sphinx-rtd-theme +sphinx-rtd-theme==1.0.0 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +stack-data==0.2.0 + # via ipython +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +termcolor==1.1.0 + # via great-expectations +testcontainers==3.5.3 + # via feast (setup.py) +thriftpy2==0.4.14 + # via happybase +toml==0.10.2 + # via + # black + # feast (setup.py) + # pre-commit +tomli==2.0.1 + # via + # coverage + # mypy + # pep517 + # pytest +toolz==0.11.2 + # via + # altair + # dask + # partd +tqdm==4.64.0 + # via + # feast (setup.py) + # great-expectations +traitlets==5.1.1 + # via + # ipython + # jupyter-core + # matplotlib-inline + # nbformat +trino==0.312.0 + # via feast (setup.py) +typed-ast==1.5.2 + # via black +types-protobuf==3.19.15 + # via + # feast (setup.py) + # mypy-protobuf +types-python-dateutil==2.8.10 + # via feast (setup.py) +types-pytz==2021.3.6 + # via feast (setup.py) +types-pyyaml==6.0.5 + # via feast (setup.py) +types-redis==4.1.19 + # via feast (setup.py) +types-requests==2.27.16 + # via feast (setup.py) +types-setuptools==57.4.12 + # via feast (setup.py) +types-tabulate==0.8.6 + # via feast (setup.py) +types-urllib3==1.26.11 + # via types-requests +typing-extensions==4.1.1 + # via + # azure-core + # great-expectations + # mypy + # pydantic +tzdata==2022.1 + # via pytz-deprecation-shim +tzlocal==4.2 + # via great-expectations +uritemplate==4.1.1 + # via google-api-python-client +urllib3==1.26.9 + # via + # botocore + # feast (setup.py) + # great-expectations + # minio + # requests + # responses +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +virtualenv==20.14.1 + # via pre-commit +watchgod==0.8.2 + # via uvicorn +wcwidth==0.2.5 + # via prompt-toolkit +websocket-client==1.3.2 + # via docker +websockets==10.2 + # via uvicorn +werkzeug==2.1.1 + # via moto +wheel==0.37.1 + # via pip-tools +wrapt==1.14.0 + # via + # deprecated + # testcontainers +xmltodict==0.12.0 + # via moto +yarl==1.7.2 + # via aiohttp +zipp==3.8.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt new file mode 100644 index 0000000000..55c5807b50 --- /dev/null +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -0,0 +1,169 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --output-file=requirements/py3.10-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +anyio==3.5.0 + # via + # starlette + # watchgod +asgiref==3.5.0 + # via uvicorn +attrs==21.4.0 + # via jsonschema +cachetools==5.0.0 + # via google-auth +certifi==2021.10.8 + # via requests +charset-normalizer==2.0.12 + # via requests +click==8.1.2 + # via + # feast (setup.py) + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via feast (setup.py) +dask==2022.1.1 + # via feast (setup.py) +dill==0.3.4 + # via 
feast (setup.py) +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fsspec==2022.3.0 + # via dask +google-api-core==2.7.2 + # via feast (setup.py) +google-auth==2.6.5 + # via google-api-core +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +grpcio==1.44.0 + # via + # feast (setup.py) + # grpcio-reflection +grpcio-reflection==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +httptools==0.4.0 + # via uvicorn +idna==3.3 + # via + # anyio + # requests +jinja2==3.1.1 + # via feast (setup.py) +jsonschema==4.4.0 + # via feast (setup.py) +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via jinja2 +mmh3==3.0.0 + # via feast (setup.py) +numpy==1.21.6 + # via + # feast (setup.py) + # pandas + # pandavro + # pyarrow +packaging==21.3 + # via dask +pandas==1.4.2 + # via + # feast (setup.py) + # pandavro +pandavro==1.5.2 + # via feast (setup.py) +partd==1.2.0 + # via dask +proto-plus==1.19.6 + # via feast (setup.py) +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # proto-plus + # tensorflow-metadata +pyarrow==7.0.0 + # via feast (setup.py) +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyparsing==3.0.8 + # via packaging +pyrsistent==0.18.1 + # via jsonschema +python-dateutil==2.8.2 + # via pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via pandas +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # uvicorn +requests==2.27.1 + # via google-api-core +rsa==4.8 + # via google-auth +six==1.16.0 + # via + # absl-py + # google-auth + # grpcio + # pandavro + # python-dateutil +sniffio==1.2.0 + # via anyio +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +toml==0.10.2 + # via feast (setup.py) +toolz==0.11.2 + # via + # dask + # partd +tqdm==4.64.0 + # via feast (setup.py) +typing-extensions==4.1.1 + # via pydantic +urllib3==1.26.9 + # via requests +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +watchgod==0.8.2 + # via uvicorn +websockets==10.2 + # via uvicorn diff --git a/sdk/python/requirements/py3.7-ci-requirements.txt b/sdk/python/requirements/py3.7-ci-requirements.txt new file mode 100644 index 0000000000..2d0da79a65 --- /dev/null +++ b/sdk/python/requirements/py3.7-ci-requirements.txt @@ -0,0 +1,870 @@ +# +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: +# +# pip-compile --extra=ci --output-file=requirements/py3.7-ci-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +adal==1.2.7 + # via + # azure-datalake-store + # msrestazure +adlfs==0.5.9 + # via feast (setup.py) +aiohttp==3.8.1 + # via + # adlfs + # gcsfs +aiosignal==1.2.0 + # via aiohttp +alabaster==0.7.12 + # via sphinx +altair==4.2.0 + # via great-expectations +anyio==3.5.0 + # via starlette +appdirs==1.4.4 + # via black +appnope==0.1.2 + # via + # ipykernel + # ipython +argon2-cffi==21.3.0 + # via notebook +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +asgiref==3.5.0 + # via uvicorn +asn1crypto==1.4.0 + # via + # oscrypto + # snowflake-connector-python +assertpy==1.1 + # via feast (setup.py) +async-timeout==4.0.2 + # via + # aiohttp + # redis +asynctest==0.13.0 + # via aiohttp +attrs==21.4.0 + # via + # aiohttp + # black + # 
jsonschema + # pytest +avro==1.10.0 + # via feast (setup.py) +azure-core==1.23.0 + # via + # adlfs + # azure-identity + # azure-storage-blob +azure-datalake-store==0.0.52 + # via adlfs +azure-identity==1.8.0 + # via adlfs +azure-storage-blob==12.9.0 + # via adlfs +babel==2.9.1 + # via sphinx +backcall==0.2.0 + # via ipython +backports-zoneinfo==0.2.1 + # via + # pytz-deprecation-shim + # tzlocal +black==19.10b0 + # via feast (setup.py) +bleach==4.1.0 + # via nbconvert +boto3==1.21.11 + # via + # feast (setup.py) + # moto +botocore==1.24.11 + # via + # boto3 + # moto + # s3transfer +cachecontrol==0.12.10 + # via firebase-admin +cachetools==4.2.4 + # via google-auth +certifi==2021.10.8 + # via + # minio + # msrest + # requests + # snowflake-connector-python +cffi==1.15.0 + # via + # argon2-cffi-bindings + # azure-datalake-store + # cryptography + # snowflake-connector-python +cfgv==3.3.1 + # via pre-commit +charset-normalizer==2.0.12 + # via + # aiohttp + # requests + # snowflake-connector-python +click==8.0.4 + # via + # black + # feast (setup.py) + # great-expectations + # pip-tools + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via feast (setup.py) +coverage[toml]==6.3.2 + # via pytest-cov +cryptography==3.4.8 + # via + # adal + # azure-identity + # azure-storage-blob + # feast (setup.py) + # moto + # msal + # pyjwt + # pyopenssl + # snowflake-connector-python +dask==2022.1.1 + # via feast (setup.py) +debugpy==1.5.1 + # via ipykernel +decorator==5.1.1 + # via + # gcsfs + # ipython +defusedxml==0.7.1 + # via nbconvert +deprecated==1.2.13 + # via redis +deprecation==2.1.0 + # via testcontainers +dill==0.3.4 + # via feast (setup.py) +distlib==0.3.4 + # via virtualenv +docker==5.0.3 + # via + # feast (setup.py) + # testcontainers +docutils==0.17.1 + # via + # sphinx + # sphinx-rtd-theme +entrypoints==0.4 + # via + # altair + # jupyter-client + # nbconvert +execnet==1.9.0 + # via pytest-xdist +fastapi==0.74.1 + # via feast (setup.py) +fastavro==1.4.9 + # via + # feast (setup.py) + # pandavro +filelock==3.6.0 + # via virtualenv +firebase-admin==4.5.2 + # via feast (setup.py) +flake8==4.0.1 + # via feast (setup.py) +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +fsspec==2022.2.0 + # via + # adlfs + # dask + # gcsfs +gcsfs==2022.2.0 + # via feast (setup.py) +google-api-core[grpc]==1.31.5 + # via + # feast (setup.py) + # firebase-admin + # google-api-python-client + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-core + # google-cloud-datastore + # google-cloud-firestore +google-api-python-client==2.39.0 + # via firebase-admin +google-auth==1.35.0 + # via + # gcsfs + # google-api-core + # google-api-python-client + # google-auth-httplib2 + # google-auth-oauthlib + # google-cloud-core + # google-cloud-storage +google-auth-httplib2==0.1.0 + # via google-api-python-client +google-auth-oauthlib==0.5.0 + # via gcsfs +google-cloud-bigquery==2.34.1 + # via feast (setup.py) +google-cloud-bigquery-storage==2.12.0 + # via feast (setup.py) +google-cloud-core==1.7.2 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-cloud-datastore==2.5.0 + # via feast (setup.py) +google-cloud-firestore==2.3.4 + # via firebase-admin +google-cloud-storage==1.40.0 + # via + # feast (setup.py) + # firebase-admin + # gcsfs +google-crc32c==1.3.0 + # via google-resumable-media +google-resumable-media==1.3.3 + # via + # google-cloud-bigquery + # google-cloud-storage 
+googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +great-expectations==0.14.8 + # via feast (setup.py) +grpcio==1.44.0 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # grpcio-reflection + # grpcio-testing + # grpcio-tools +grpcio-reflection==1.44.0 + # via feast (setup.py) +grpcio-testing==1.44.0 + # via feast (setup.py) +grpcio-tools==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +happybase==1.2.0 + # via feast (setup.py) +hiredis==2.0.0 + # via feast (setup.py) +httplib2==0.20.4 + # via + # google-api-python-client + # google-auth-httplib2 +httptools==0.3.0 + # via uvicorn +identify==2.4.11 + # via pre-commit +idna==3.3 + # via + # anyio + # requests + # snowflake-connector-python + # yarl +imagesize==1.3.0 + # via sphinx +importlib-metadata==4.2.0 + # via + # click + # flake8 + # great-expectations + # jsonschema + # moto + # pep517 + # pluggy + # pre-commit + # pytest + # redis + # virtualenv +importlib-resources==5.4.0 + # via jsonschema +iniconfig==1.1.1 + # via pytest +ipykernel==6.9.1 + # via + # ipywidgets + # notebook +ipython==7.32.0 + # via + # ipykernel + # ipywidgets +ipython-genutils==0.2.0 + # via + # ipywidgets + # nbformat + # notebook +ipywidgets==7.6.5 + # via great-expectations +isodate==0.6.1 + # via msrest +isort==5.10.1 + # via feast (setup.py) +jedi==0.18.1 + # via ipython +jinja2==3.0.3 + # via + # altair + # feast (setup.py) + # great-expectations + # moto + # nbconvert + # notebook + # sphinx +jmespath==0.10.0 + # via + # boto3 + # botocore +jsonpatch==1.32 + # via great-expectations +jsonpointer==2.2 + # via jsonpatch +jsonschema==4.4.0 + # via + # altair + # feast (setup.py) + # great-expectations + # nbformat +jupyter-client==7.1.2 + # via + # ipykernel + # nbclient + # notebook +jupyter-core==4.9.2 + # via + # jupyter-client + # nbconvert + # nbformat + # notebook +jupyterlab-pygments==0.1.2 + # via nbconvert +jupyterlab-widgets==1.0.2 + # via ipywidgets +locket==0.2.1 + # via partd +markupsafe==2.1.0 + # via + # jinja2 + # moto +matplotlib-inline==0.1.3 + # via + # ipykernel + # ipython +mccabe==0.6.1 + # via flake8 +minio==7.1.0 + # via feast (setup.py) +mistune==0.8.4 + # via + # great-expectations + # nbconvert +mmh3==3.0.0 + # via feast (setup.py) +mock==2.0.0 + # via feast (setup.py) +moto==3.0.5 + # via feast (setup.py) +msal==1.17.0 + # via + # azure-identity + # msal-extensions +msal-extensions==0.3.1 + # via azure-identity +msgpack==1.0.3 + # via cachecontrol +msrest==0.6.21 + # via + # azure-storage-blob + # msrestazure +msrestazure==0.6.4 + # via adlfs +multidict==6.0.2 + # via + # aiohttp + # yarl +mypy==0.931 + # via feast (setup.py) +mypy-extensions==0.4.3 + # via mypy +mypy-protobuf==3.1 + # via feast (setup.py) +nbclient==0.5.11 + # via nbconvert +nbconvert==6.4.2 + # via notebook +nbformat==5.1.3 + # via + # ipywidgets + # nbclient + # nbconvert + # notebook +nest-asyncio==1.5.4 + # via + # ipykernel + # jupyter-client + # nbclient + # notebook +nodeenv==1.6.0 + # via pre-commit +notebook==6.4.10 + # via widgetsnbextension +numpy==1.21.5 + # via + # altair + # feast (setup.py) + # great-expectations + # pandas + # pandavro + # pyarrow + # scipy +oauthlib==3.2.0 + # via requests-oauthlib +oscrypto==1.2.1 + # via snowflake-connector-python +packaging==21.3 + # via + # bleach + # dask + # deprecation + # google-api-core + # google-cloud-bigquery + # google-cloud-firestore + # pytest + # redis + # sphinx +pandas==1.3.5 + # via + # altair + # 
feast (setup.py) + # great-expectations + # pandavro + # snowflake-connector-python +pandavro==1.5.2 + # via feast (setup.py) +pandocfilters==1.5.0 + # via nbconvert +parso==0.8.3 + # via jedi +partd==1.2.0 + # via dask +pathspec==0.9.0 + # via black +pbr==5.8.1 + # via mock +pep517==0.12.0 + # via pip-tools +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pip-tools==6.5.1 + # via feast (setup.py) +platformdirs==2.5.1 + # via virtualenv +pluggy==1.0.0 + # via pytest +ply==3.11 + # via thriftpy2 +portalocker==2.4.0 + # via msal-extensions +pre-commit==2.17.0 + # via feast (setup.py) +prometheus-client==0.13.1 + # via notebook +prompt-toolkit==3.0.28 + # via ipython +proto-plus==1.19.6 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-datastore + # google-cloud-firestore +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # googleapis-common-protos + # grpcio-reflection + # grpcio-testing + # grpcio-tools + # mypy-protobuf + # proto-plus + # tensorflow-metadata +psutil==5.9.0 + # via feast (setup.py) +ptyprocess==0.7.0 + # via + # pexpect + # terminado +py==1.11.0 + # via + # pytest + # pytest-forked +py-cpuinfo==8.0.0 + # via pytest-benchmark +py4j==0.10.9.3 + # via pyspark +pyarrow==6.0.1 + # via + # feast (setup.py) + # snowflake-connector-python +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pybindgen==0.22.0 + # via feast (setup.py) +pycodestyle==2.8.0 + # via flake8 +pycparser==2.21 + # via cffi +pycryptodomex==3.14.1 + # via snowflake-connector-python +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyflakes==2.4.0 + # via flake8 +pygments==2.11.2 + # via + # ipython + # jupyterlab-pygments + # nbconvert + # sphinx +pyjwt[crypto]==2.3.0 + # via + # adal + # msal + # snowflake-connector-python +pyopenssl==21.0.0 + # via snowflake-connector-python +pyparsing==2.4.7 + # via + # great-expectations + # httplib2 + # packaging +pyrsistent==0.18.1 + # via jsonschema +pyspark==3.2.1 + # via feast (setup.py) +pytest==7.0.1 + # via + # feast (setup.py) + # pytest-benchmark + # pytest-cov + # pytest-forked + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # pytest-timeout + # pytest-xdist +pytest-benchmark==3.4.1 + # via feast (setup.py) +pytest-cov==3.0.0 + # via feast (setup.py) +pytest-forked==1.4.0 + # via pytest-xdist +pytest-lazy-fixture==0.6.3 + # via feast (setup.py) +pytest-mock==1.10.4 + # via feast (setup.py) +pytest-ordering==0.6 + # via feast (setup.py) +pytest-timeout==1.4.2 + # via feast (setup.py) +pytest-xdist==2.5.0 + # via feast (setup.py) +python-dateutil==2.8.2 + # via + # adal + # botocore + # google-cloud-bigquery + # great-expectations + # jupyter-client + # moto + # pandas +python-dotenv==0.19.2 + # via uvicorn +pytz==2021.3 + # via + # babel + # google-api-core + # great-expectations + # moto + # pandas + # snowflake-connector-python + # trino +pytz-deprecation-shim==0.1.0.post0 + # via tzlocal +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # pre-commit + # uvicorn +pyzmq==22.3.0 + # via + # jupyter-client + # notebook +redis==4.2.2 + # via feast (setup.py) +regex==2022.3.2 + # via black +requests==2.27.1 + # via + # adal + # adlfs + # azure-core + # azure-datalake-store + # cachecontrol + # docker + # gcsfs + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # moto + # msal + # msrest + # requests-oauthlib + # responses + # 
snowflake-connector-python + # sphinx + # trino +requests-oauthlib==1.3.1 + # via + # google-auth-oauthlib + # msrest +responses==0.18.0 + # via moto +rsa==4.8 + # via google-auth +ruamel-yaml==0.17.17 + # via great-expectations +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml +s3transfer==0.5.2 + # via boto3 +scipy==1.7.3 + # via great-expectations +send2trash==1.8.0 + # via notebook +six==1.16.0 + # via + # absl-py + # azure-core + # azure-identity + # bleach + # google-api-core + # google-auth + # google-auth-httplib2 + # google-cloud-core + # google-resumable-media + # grpcio + # happybase + # mock + # msrestazure + # pandavro + # pyopenssl + # python-dateutil + # virtualenv +sniffio==1.2.0 + # via anyio +snowballstemmer==2.2.0 + # via sphinx +snowflake-connector-python[pandas]==2.7.4 + # via feast (setup.py) +sphinx==4.3.2 + # via + # feast (setup.py) + # sphinx-rtd-theme +sphinx-rtd-theme==1.0.0 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +termcolor==1.1.0 + # via great-expectations +terminado==0.13.2 + # via notebook +testcontainers==3.5.3 + # via feast (setup.py) +thriftpy2==0.4.14 + # via happybase +toml==0.10.2 + # via + # black + # feast (setup.py) + # pre-commit +tomli==2.0.1 + # via + # coverage + # mypy + # pep517 + # pytest +toolz==0.11.2 + # via + # altair + # dask + # partd +tornado==6.1 + # via + # ipykernel + # jupyter-client + # notebook + # terminado +tqdm==4.63.0 + # via + # feast (setup.py) + # great-expectations +traitlets==5.1.1 + # via + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-core + # matplotlib-inline + # nbclient + # nbconvert + # nbformat + # notebook +trino==0.312.0 + # via feast (setup.py) +typed-ast==1.5.2 + # via + # black + # mypy +types-protobuf==3.19.12 + # via + # feast (setup.py) + # mypy-protobuf +types-python-dateutil==2.8.9 + # via feast (setup.py) +types-pytz==2021.3.5 + # via feast (setup.py) +types-pyyaml==6.0.4 + # via feast (setup.py) +types-redis==4.1.17 + # via feast (setup.py) +types-requests==2.27.11 + # via feast (setup.py) +types-setuptools==57.4.9 + # via feast (setup.py) +types-tabulate==0.8.5 + # via feast (setup.py) +types-urllib3==1.26.10 + # via types-requests +typing-extensions==4.1.1 + # via + # aiohttp + # anyio + # argon2-cffi + # asgiref + # async-timeout + # azure-core + # great-expectations + # h11 + # importlib-metadata + # jsonschema + # mypy + # pydantic + # redis + # starlette + # uvicorn + # yarl +tzdata==2021.5 + # via pytz-deprecation-shim +tzlocal==4.1 + # via great-expectations +uritemplate==4.1.1 + # via google-api-python-client +urllib3==1.26.8 + # via + # botocore + # feast (setup.py) + # minio + # requests + # responses +uvicorn[standard]==0.17.5 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +virtualenv==20.13.2 + # via pre-commit +watchgod==0.7 + # via uvicorn +wcwidth==0.2.5 + # via prompt-toolkit +webencodings==0.5.1 + # via bleach +websocket-client==1.3.1 + # via docker +websockets==10.2 + # via uvicorn +werkzeug==2.0.3 + # via moto +wheel==0.37.1 + # via pip-tools +widgetsnbextension==3.5.2 + # via ipywidgets +wrapt==1.13.3 + # via + # deprecated 
+ # testcontainers +xmltodict==0.12.0 + # via moto +yarl==1.7.2 + # via aiohttp +zipp==3.7.0 + # via + # importlib-metadata + # importlib-resources + # pep517 + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/sdk/python/requirements/py3.7-requirements.txt b/sdk/python/requirements/py3.7-requirements.txt new file mode 100644 index 0000000000..1ca911f134 --- /dev/null +++ b/sdk/python/requirements/py3.7-requirements.txt @@ -0,0 +1,185 @@ +# +# This file is autogenerated by pip-compile with python 3.7 +# To update, run: +# +# pip-compile --output-file=requirements/py3.7-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +anyio==3.5.0 + # via starlette +asgiref==3.5.0 + # via uvicorn +attrs==21.4.0 + # via jsonschema +cachetools==5.0.0 + # via google-auth +certifi==2021.10.8 + # via requests +charset-normalizer==2.0.12 + # via requests +click==8.0.4 + # via + # feast (setup.py) + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via feast (setup.py) +dask==2022.1.1 + # via feast (setup.py) +dill==0.3.4 + # via feast (setup.py) +fastapi==0.74.1 + # via feast (setup.py) +fastavro==1.4.9 + # via + # feast (setup.py) + # pandavro +fsspec==2022.2.0 + # via dask +google-api-core==2.5.0 + # via feast (setup.py) +google-auth==2.6.0 + # via google-api-core +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +grpcio==1.44.0 + # via + # feast (setup.py) + # grpcio-reflection +grpcio-reflection==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +httptools==0.3.0 + # via uvicorn +idna==3.3 + # via + # anyio + # requests +importlib-metadata==4.11.1 + # via + # click + # jsonschema +importlib-resources==5.4.0 + # via jsonschema +jinja2==3.0.3 + # via feast (setup.py) +jsonschema==4.4.0 + # via feast (setup.py) +locket==0.2.1 + # via partd +markupsafe==2.1.0 + # via jinja2 +mmh3==3.0.0 + # via feast (setup.py) +numpy==1.21.5 + # via + # feast (setup.py) + # pandas + # pandavro + # pyarrow +packaging==21.3 + # via dask +pandas==1.3.5 + # via + # feast (setup.py) + # pandavro +pandavro==1.5.2 + # via feast (setup.py) +partd==1.2.0 + # via dask +proto-plus==1.19.6 + # via feast (setup.py) +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # proto-plus + # tensorflow-metadata +pyarrow==7.0.0 + # via feast (setup.py) +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyparsing==3.0.7 + # via packaging +pyrsistent==0.18.1 + # via jsonschema +python-dateutil==2.8.2 + # via pandas +python-dotenv==0.19.2 + # via uvicorn +pytz==2021.3 + # via pandas +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # uvicorn +requests==2.27.1 + # via google-api-core +rsa==4.8 + # via google-auth +six==1.16.0 + # via + # absl-py + # google-auth + # grpcio + # pandavro + # python-dateutil +sniffio==1.2.0 + # via anyio +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.6.0 + # via feast (setup.py) +toml==0.10.2 + # via feast (setup.py) +toolz==0.11.2 + # via + # dask + # partd +tqdm==4.62.3 + # via feast (setup.py) +typing-extensions==4.1.1 + # via + # anyio + # asgiref + # h11 + # importlib-metadata + # jsonschema + # pydantic + # starlette + # uvicorn +urllib3==1.26.8 + # via requests +uvicorn[standard]==0.17.5 + # via 
feast (setup.py) +uvloop==0.16.0 + # via uvicorn +watchgod==0.7 + # via uvicorn +websockets==10.2 + # via uvicorn +zipp==3.7.0 + # via + # importlib-metadata + # importlib-resources diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt new file mode 100644 index 0000000000..f1d46e6ec4 --- /dev/null +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -0,0 +1,760 @@ +# +# This file is autogenerated by pip-compile with python 3.8 +# To update, run: +# +# pip-compile --extra=ci --output-file=requirements/py3.8-ci-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +adal==1.2.7 + # via + # azure-datalake-store + # msrestazure +adlfs==0.5.9 + # via feast (setup.py) +aiohttp==3.8.1 + # via + # adlfs + # gcsfs +aiosignal==1.2.0 + # via aiohttp +alabaster==0.7.12 + # via sphinx +altair==4.2.0 + # via great-expectations +anyio==3.5.0 + # via + # starlette + # watchgod +appdirs==1.4.4 + # via black +appnope==0.1.3 + # via ipython +asgiref==3.5.0 + # via uvicorn +asn1crypto==1.5.1 + # via + # oscrypto + # snowflake-connector-python +assertpy==1.1 + # via feast (setup.py) +asttokens==2.0.5 + # via stack-data +async-timeout==4.0.2 + # via + # aiohttp + # redis +attrs==21.4.0 + # via + # aiohttp + # black + # jsonschema + # pytest +avro==1.10.0 + # via feast (setup.py) +azure-core==1.23.1 + # via + # adlfs + # azure-identity + # azure-storage-blob +azure-datalake-store==0.0.52 + # via adlfs +azure-identity==1.9.0 + # via adlfs +azure-storage-blob==12.11.0 + # via adlfs +babel==2.9.1 + # via sphinx +backcall==0.2.0 + # via ipython +backports-zoneinfo==0.2.1 + # via + # pytz-deprecation-shim + # tzlocal +black==19.10b0 + # via feast (setup.py) +boto3==1.21.41 + # via + # feast (setup.py) + # moto +botocore==1.24.41 + # via + # boto3 + # moto + # s3transfer +cachecontrol==0.12.10 + # via firebase-admin +cachetools==4.2.4 + # via google-auth +certifi==2021.10.8 + # via + # minio + # msrest + # requests + # snowflake-connector-python +cffi==1.15.0 + # via + # azure-datalake-store + # cryptography + # snowflake-connector-python +cfgv==3.3.1 + # via pre-commit +charset-normalizer==2.0.12 + # via + # aiohttp + # requests + # snowflake-connector-python +click==8.1.2 + # via + # black + # feast (setup.py) + # great-expectations + # pip-tools + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via + # feast (setup.py) + # great-expectations +coverage[toml]==6.3.2 + # via pytest-cov +cryptography==3.4.8 + # via + # adal + # azure-identity + # azure-storage-blob + # feast (setup.py) + # great-expectations + # moto + # msal + # pyopenssl + # snowflake-connector-python +dask==2022.1.1 + # via feast (setup.py) +dataclasses==0.6 + # via great-expectations +decorator==5.1.1 + # via + # gcsfs + # ipython +deprecated==1.2.13 + # via redis +deprecation==2.1.0 + # via testcontainers +dill==0.3.4 + # via feast (setup.py) +distlib==0.3.4 + # via virtualenv +docker==5.0.3 + # via + # feast (setup.py) + # testcontainers +docutils==0.17.1 + # via + # sphinx + # sphinx-rtd-theme +entrypoints==0.4 + # via altair +execnet==1.9.0 + # via pytest-xdist +executing==0.8.3 + # via stack-data +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fastjsonschema==2.15.3 + # via nbformat +filelock==3.6.0 + # via virtualenv +firebase-admin==4.5.2 + # via feast (setup.py) +flake8==4.0.1 + # via feast (setup.py) +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +fsspec==2022.3.0 + # via + # adlfs + # 
dask + # gcsfs +gcsfs==2022.3.0 + # via feast (setup.py) +google-api-core[grpc]==1.31.5 + # via + # feast (setup.py) + # firebase-admin + # google-api-python-client + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-core + # google-cloud-datastore + # google-cloud-firestore +google-api-python-client==2.44.0 + # via firebase-admin +google-auth==1.35.0 + # via + # gcsfs + # google-api-core + # google-api-python-client + # google-auth-httplib2 + # google-auth-oauthlib + # google-cloud-core + # google-cloud-storage +google-auth-httplib2==0.1.0 + # via google-api-python-client +google-auth-oauthlib==0.5.1 + # via gcsfs +google-cloud-bigquery==2.34.3 + # via feast (setup.py) +google-cloud-bigquery-storage==2.13.1 + # via feast (setup.py) +google-cloud-core==1.7.2 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-cloud-datastore==2.5.1 + # via feast (setup.py) +google-cloud-firestore==2.4.0 + # via firebase-admin +google-cloud-storage==1.40.0 + # via + # feast (setup.py) + # firebase-admin + # gcsfs +google-crc32c==1.3.0 + # via google-resumable-media +google-resumable-media==1.3.3 + # via + # google-cloud-bigquery + # google-cloud-storage +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +great-expectations==0.14.13 + # via feast (setup.py) +grpcio==1.44.0 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # grpcio-reflection + # grpcio-testing + # grpcio-tools +grpcio-reflection==1.44.0 + # via feast (setup.py) +grpcio-testing==1.44.0 + # via feast (setup.py) +grpcio-tools==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +happybase==1.2.0 + # via feast (setup.py) +hiredis==2.0.0 + # via feast (setup.py) +httplib2==0.20.4 + # via + # google-api-python-client + # google-auth-httplib2 +httptools==0.4.0 + # via uvicorn +identify==2.4.12 + # via pre-commit +idna==3.3 + # via + # anyio + # requests + # snowflake-connector-python + # yarl +imagesize==1.3.0 + # via sphinx +importlib-metadata==4.11.3 + # via great-expectations +importlib-resources==5.7.0 + # via jsonschema +iniconfig==1.1.1 + # via pytest +ipython==8.2.0 + # via great-expectations +isodate==0.6.1 + # via msrest +isort==5.10.1 + # via feast (setup.py) +jedi==0.18.1 + # via ipython +jinja2==3.0.3 + # via + # altair + # feast (setup.py) + # great-expectations + # moto + # sphinx +jmespath==1.0.0 + # via + # boto3 + # botocore +jsonpatch==1.32 + # via great-expectations +jsonpointer==2.3 + # via jsonpatch +jsonschema==4.4.0 + # via + # altair + # feast (setup.py) + # great-expectations + # nbformat +jupyter-core==4.9.2 + # via nbformat +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via + # jinja2 + # moto +matplotlib-inline==0.1.3 + # via ipython +mccabe==0.6.1 + # via flake8 +minio==7.1.0 + # via feast (setup.py) +mistune==2.0.2 + # via great-expectations +mmh3==3.0.0 + # via feast (setup.py) +mock==2.0.0 + # via feast (setup.py) +moto==3.1.4 + # via feast (setup.py) +msal==1.17.0 + # via + # azure-identity + # msal-extensions +msal-extensions==0.3.1 + # via azure-identity +msgpack==1.0.3 + # via cachecontrol +msrest==0.6.21 + # via + # azure-storage-blob + # msrestazure +msrestazure==0.6.4 + # via adlfs +multidict==6.0.2 + # via + # aiohttp + # yarl +mypy==0.931 + # via feast (setup.py) +mypy-extensions==0.4.3 + # via mypy +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.3.0 + # via great-expectations 
+nodeenv==1.6.0 + # via pre-commit +numpy==1.21.6 + # via + # altair + # feast (setup.py) + # great-expectations + # pandas + # pandavro + # pyarrow + # scipy +oauthlib==3.2.0 + # via requests-oauthlib +oscrypto==1.3.0 + # via snowflake-connector-python +packaging==21.3 + # via + # dask + # deprecation + # google-api-core + # google-cloud-bigquery + # great-expectations + # pytest + # redis + # sphinx +pandas==1.3.5 + # via + # altair + # feast (setup.py) + # great-expectations + # pandavro + # snowflake-connector-python +pandavro==1.5.2 + # via feast (setup.py) +parso==0.8.3 + # via jedi +partd==1.2.0 + # via dask +pathspec==0.9.0 + # via black +pbr==5.8.1 + # via mock +pep517==0.12.0 + # via pip-tools +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pip-tools==6.6.0 + # via feast (setup.py) +platformdirs==2.5.1 + # via virtualenv +pluggy==1.0.0 + # via pytest +ply==3.11 + # via thriftpy2 +portalocker==2.4.0 + # via msal-extensions +pre-commit==2.18.1 + # via feast (setup.py) +prompt-toolkit==3.0.29 + # via ipython +proto-plus==1.19.6 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-datastore + # google-cloud-firestore +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # googleapis-common-protos + # grpcio-reflection + # grpcio-testing + # grpcio-tools + # mypy-protobuf + # proto-plus + # tensorflow-metadata +psutil==5.9.0 + # via feast (setup.py) +ptyprocess==0.7.0 + # via pexpect +pure-eval==0.2.2 + # via stack-data +py==1.11.0 + # via + # pytest + # pytest-forked +py-cpuinfo==8.0.0 + # via pytest-benchmark +py4j==0.10.9.3 + # via pyspark +pyarrow==6.0.1 + # via + # feast (setup.py) + # snowflake-connector-python +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pybindgen==0.22.1 + # via feast (setup.py) +pycodestyle==2.8.0 + # via flake8 +pycparser==2.21 + # via cffi +pycryptodomex==3.14.1 + # via snowflake-connector-python +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyflakes==2.4.0 + # via flake8 +pygments==2.11.2 + # via + # ipython + # sphinx +pyjwt[crypto]==2.3.0 + # via + # adal + # msal + # snowflake-connector-python +pyopenssl==21.0.0 + # via snowflake-connector-python +pyparsing==2.4.7 + # via + # great-expectations + # httplib2 + # packaging +pyrsistent==0.18.1 + # via jsonschema +pyspark==3.2.1 + # via feast (setup.py) +pytest==7.1.1 + # via + # feast (setup.py) + # pytest-benchmark + # pytest-cov + # pytest-forked + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # pytest-timeout + # pytest-xdist +pytest-benchmark==3.4.1 + # via feast (setup.py) +pytest-cov==3.0.0 + # via feast (setup.py) +pytest-forked==1.4.0 + # via pytest-xdist +pytest-lazy-fixture==0.6.3 + # via feast (setup.py) +pytest-mock==1.10.4 + # via feast (setup.py) +pytest-ordering==0.6 + # via feast (setup.py) +pytest-timeout==1.4.2 + # via feast (setup.py) +pytest-xdist==2.5.0 + # via feast (setup.py) +python-dateutil==2.8.2 + # via + # adal + # botocore + # google-cloud-bigquery + # great-expectations + # moto + # pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via + # babel + # google-api-core + # great-expectations + # moto + # pandas + # snowflake-connector-python + # trino +pytz-deprecation-shim==0.1.0.post0 + # via tzlocal +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # pre-commit + # uvicorn +redis==4.2.2 + # via feast (setup.py) +regex==2022.3.15 + # via black +requests==2.27.1 + # via + # 
adal + # adlfs + # azure-core + # azure-datalake-store + # cachecontrol + # docker + # gcsfs + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # moto + # msal + # msrest + # requests-oauthlib + # responses + # snowflake-connector-python + # sphinx + # trino +requests-oauthlib==1.3.1 + # via + # google-auth-oauthlib + # msrest +responses==0.20.0 + # via moto +rsa==4.8 + # via google-auth +ruamel-yaml==0.17.17 + # via great-expectations +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml +s3transfer==0.5.2 + # via boto3 +scipy==1.8.0 + # via great-expectations +six==1.16.0 + # via + # absl-py + # azure-core + # azure-identity + # google-api-core + # google-auth + # google-auth-httplib2 + # google-cloud-core + # google-resumable-media + # grpcio + # happybase + # mock + # msrestazure + # pandavro + # pyopenssl + # python-dateutil + # virtualenv +sniffio==1.2.0 + # via anyio +snowballstemmer==2.2.0 + # via sphinx +snowflake-connector-python[pandas]==2.7.6 + # via feast (setup.py) +sphinx==4.3.2 + # via + # feast (setup.py) + # sphinx-rtd-theme +sphinx-rtd-theme==1.0.0 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +stack-data==0.2.0 + # via ipython +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +termcolor==1.1.0 + # via great-expectations +testcontainers==3.5.3 + # via feast (setup.py) +thriftpy2==0.4.14 + # via happybase +toml==0.10.2 + # via + # black + # feast (setup.py) + # pre-commit +tomli==2.0.1 + # via + # coverage + # mypy + # pep517 + # pytest +toolz==0.11.2 + # via + # altair + # dask + # partd +tqdm==4.64.0 + # via + # feast (setup.py) + # great-expectations +traitlets==5.1.1 + # via + # ipython + # jupyter-core + # matplotlib-inline + # nbformat +trino==0.312.0 + # via feast (setup.py) +typed-ast==1.5.2 + # via black +types-protobuf==3.19.15 + # via + # feast (setup.py) + # mypy-protobuf +types-python-dateutil==2.8.10 + # via feast (setup.py) +types-pytz==2021.3.6 + # via feast (setup.py) +types-pyyaml==6.0.5 + # via feast (setup.py) +types-redis==4.1.19 + # via feast (setup.py) +types-requests==2.27.16 + # via feast (setup.py) +types-setuptools==57.4.12 + # via feast (setup.py) +types-tabulate==0.8.6 + # via feast (setup.py) +types-urllib3==1.26.11 + # via types-requests +typing-extensions==4.1.1 + # via + # azure-core + # great-expectations + # mypy + # pydantic +tzdata==2022.1 + # via pytz-deprecation-shim +tzlocal==4.2 + # via great-expectations +uritemplate==4.1.1 + # via google-api-python-client +urllib3==1.26.9 + # via + # botocore + # feast (setup.py) + # great-expectations + # minio + # requests + # responses +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +virtualenv==20.14.1 + # via pre-commit +watchgod==0.8.2 + # via uvicorn +wcwidth==0.2.5 + # via prompt-toolkit +websocket-client==1.3.2 + # via docker +websockets==10.2 + # via uvicorn +werkzeug==2.1.1 + # via moto +wheel==0.37.1 + # via pip-tools +wrapt==1.14.0 + # via + # deprecated + # testcontainers +xmltodict==0.12.0 + # via moto +yarl==1.7.2 + # via aiohttp +zipp==3.8.0 + # via + # importlib-metadata + # importlib-resources + +# The following packages are 
considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt new file mode 100644 index 0000000000..f0fc61e1b2 --- /dev/null +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -0,0 +1,173 @@ +# +# This file is autogenerated by pip-compile with python 3.8 +# To update, run: +# +# pip-compile --output-file=requirements/py3.8-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +anyio==3.5.0 + # via + # starlette + # watchgod +asgiref==3.5.0 + # via uvicorn +attrs==21.4.0 + # via jsonschema +cachetools==5.0.0 + # via google-auth +certifi==2021.10.8 + # via requests +charset-normalizer==2.0.12 + # via requests +click==8.1.2 + # via + # feast (setup.py) + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via feast (setup.py) +dask==2022.1.1 + # via feast (setup.py) +dill==0.3.4 + # via feast (setup.py) +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fsspec==2022.3.0 + # via dask +google-api-core==2.7.2 + # via feast (setup.py) +google-auth==2.6.5 + # via google-api-core +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +grpcio==1.44.0 + # via + # feast (setup.py) + # grpcio-reflection +grpcio-reflection==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +httptools==0.4.0 + # via uvicorn +idna==3.3 + # via + # anyio + # requests +importlib-resources==5.7.0 + # via jsonschema +jinja2==3.1.1 + # via feast (setup.py) +jsonschema==4.4.0 + # via feast (setup.py) +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via jinja2 +mmh3==3.0.0 + # via feast (setup.py) +numpy==1.21.6 + # via + # feast (setup.py) + # pandas + # pandavro + # pyarrow +packaging==21.3 + # via dask +pandas==1.4.2 + # via + # feast (setup.py) + # pandavro +pandavro==1.5.2 + # via feast (setup.py) +partd==1.2.0 + # via dask +proto-plus==1.19.6 + # via feast (setup.py) +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # proto-plus + # tensorflow-metadata +pyarrow==7.0.0 + # via feast (setup.py) +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyparsing==3.0.8 + # via packaging +pyrsistent==0.18.1 + # via jsonschema +python-dateutil==2.8.2 + # via pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via pandas +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # uvicorn +requests==2.27.1 + # via google-api-core +rsa==4.8 + # via google-auth +six==1.16.0 + # via + # absl-py + # google-auth + # grpcio + # pandavro + # python-dateutil +sniffio==1.2.0 + # via anyio +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +toml==0.10.2 + # via feast (setup.py) +toolz==0.11.2 + # via + # dask + # partd +tqdm==4.64.0 + # via feast (setup.py) +typing-extensions==4.1.1 + # via pydantic +urllib3==1.26.9 + # via requests +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +watchgod==0.8.2 + # via uvicorn +websockets==10.2 + # via uvicorn +zipp==3.8.0 + # via importlib-resources diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt new file mode 100644 index 0000000000..c79daa3ffa --- /dev/null 
+++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -0,0 +1,752 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --extra=ci --output-file=requirements/py3.9-ci-requirements.txt +# +absl-py==1.0.0 + # via tensorflow-metadata +adal==1.2.7 + # via + # azure-datalake-store + # msrestazure +adlfs==0.5.9 + # via feast (setup.py) +aiohttp==3.8.1 + # via + # adlfs + # gcsfs +aiosignal==1.2.0 + # via aiohttp +alabaster==0.7.12 + # via sphinx +altair==4.2.0 + # via great-expectations +anyio==3.5.0 + # via + # starlette + # watchgod +appdirs==1.4.4 + # via black +appnope==0.1.3 + # via ipython +asgiref==3.5.0 + # via uvicorn +asn1crypto==1.5.1 + # via + # oscrypto + # snowflake-connector-python +assertpy==1.1 + # via feast (setup.py) +asttokens==2.0.5 + # via stack-data +async-timeout==4.0.2 + # via + # aiohttp + # redis +attrs==21.4.0 + # via + # aiohttp + # black + # jsonschema + # pytest +avro==1.10.0 + # via feast (setup.py) +azure-core==1.23.1 + # via + # adlfs + # azure-identity + # azure-storage-blob +azure-datalake-store==0.0.52 + # via adlfs +azure-identity==1.9.0 + # via adlfs +azure-storage-blob==12.11.0 + # via adlfs +babel==2.9.1 + # via sphinx +backcall==0.2.0 + # via ipython +black==19.10b0 + # via feast (setup.py) +boto3==1.21.41 + # via + # feast (setup.py) + # moto +botocore==1.24.41 + # via + # boto3 + # moto + # s3transfer +cachecontrol==0.12.10 + # via firebase-admin +cachetools==4.2.4 + # via google-auth +certifi==2021.10.8 + # via + # minio + # msrest + # requests + # snowflake-connector-python +cffi==1.15.0 + # via + # azure-datalake-store + # cryptography + # snowflake-connector-python +cfgv==3.3.1 + # via pre-commit +charset-normalizer==2.0.12 + # via + # aiohttp + # requests + # snowflake-connector-python +click==8.1.2 + # via + # black + # feast (setup.py) + # great-expectations + # pip-tools + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via + # feast (setup.py) + # great-expectations +coverage[toml]==6.3.2 + # via pytest-cov +cryptography==3.4.8 + # via + # adal + # azure-identity + # azure-storage-blob + # feast (setup.py) + # great-expectations + # moto + # msal + # pyopenssl + # snowflake-connector-python +dask==2022.1.1 + # via feast (setup.py) +dataclasses==0.6 + # via great-expectations +decorator==5.1.1 + # via + # gcsfs + # ipython +deprecated==1.2.13 + # via redis +deprecation==2.1.0 + # via testcontainers +dill==0.3.4 + # via feast (setup.py) +distlib==0.3.4 + # via virtualenv +docker==5.0.3 + # via + # feast (setup.py) + # testcontainers +docutils==0.17.1 + # via + # sphinx + # sphinx-rtd-theme +entrypoints==0.4 + # via altair +execnet==1.9.0 + # via pytest-xdist +executing==0.8.3 + # via stack-data +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fastjsonschema==2.15.3 + # via nbformat +filelock==3.6.0 + # via virtualenv +firebase-admin==4.5.2 + # via feast (setup.py) +flake8==4.0.1 + # via feast (setup.py) +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +fsspec==2022.3.0 + # via + # adlfs + # dask + # gcsfs +gcsfs==2022.3.0 + # via feast (setup.py) +google-api-core[grpc]==1.31.5 + # via + # feast (setup.py) + # firebase-admin + # google-api-python-client + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-core + # google-cloud-datastore + # google-cloud-firestore +google-api-python-client==2.44.0 + # via firebase-admin +google-auth==1.35.0 + # via + # gcsfs + # google-api-core + # 
google-api-python-client + # google-auth-httplib2 + # google-auth-oauthlib + # google-cloud-core + # google-cloud-storage +google-auth-httplib2==0.1.0 + # via google-api-python-client +google-auth-oauthlib==0.5.1 + # via gcsfs +google-cloud-bigquery==2.34.3 + # via feast (setup.py) +google-cloud-bigquery-storage==2.13.1 + # via feast (setup.py) +google-cloud-core==1.7.2 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-datastore + # google-cloud-firestore + # google-cloud-storage +google-cloud-datastore==2.5.1 + # via feast (setup.py) +google-cloud-firestore==2.4.0 + # via firebase-admin +google-cloud-storage==1.40.0 + # via + # feast (setup.py) + # firebase-admin + # gcsfs +google-crc32c==1.3.0 + # via google-resumable-media +google-resumable-media==1.3.3 + # via + # google-cloud-bigquery + # google-cloud-storage +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +great-expectations==0.14.13 + # via feast (setup.py) +grpcio==1.44.0 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # grpcio-reflection + # grpcio-testing + # grpcio-tools +grpcio-reflection==1.44.0 + # via feast (setup.py) +grpcio-testing==1.44.0 + # via feast (setup.py) +grpcio-tools==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +happybase==1.2.0 + # via feast (setup.py) +hiredis==2.0.0 + # via feast (setup.py) +httplib2==0.20.4 + # via + # google-api-python-client + # google-auth-httplib2 +httptools==0.4.0 + # via uvicorn +identify==2.4.12 + # via pre-commit +idna==3.3 + # via + # anyio + # requests + # snowflake-connector-python + # yarl +imagesize==1.3.0 + # via sphinx +importlib-metadata==4.11.3 + # via great-expectations +iniconfig==1.1.1 + # via pytest +ipython==8.2.0 + # via great-expectations +isodate==0.6.1 + # via msrest +isort==5.10.1 + # via feast (setup.py) +jedi==0.18.1 + # via ipython +jinja2==3.0.3 + # via + # altair + # feast (setup.py) + # great-expectations + # moto + # sphinx +jmespath==1.0.0 + # via + # boto3 + # botocore +jsonpatch==1.32 + # via great-expectations +jsonpointer==2.3 + # via jsonpatch +jsonschema==4.4.0 + # via + # altair + # feast (setup.py) + # great-expectations + # nbformat +jupyter-core==4.9.2 + # via nbformat +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via + # jinja2 + # moto +matplotlib-inline==0.1.3 + # via ipython +mccabe==0.6.1 + # via flake8 +minio==7.1.0 + # via feast (setup.py) +mistune==2.0.2 + # via great-expectations +mmh3==3.0.0 + # via feast (setup.py) +mock==2.0.0 + # via feast (setup.py) +moto==3.1.4 + # via feast (setup.py) +msal==1.17.0 + # via + # azure-identity + # msal-extensions +msal-extensions==0.3.1 + # via azure-identity +msgpack==1.0.3 + # via cachecontrol +msrest==0.6.21 + # via + # azure-storage-blob + # msrestazure +msrestazure==0.6.4 + # via adlfs +multidict==6.0.2 + # via + # aiohttp + # yarl +mypy==0.931 + # via feast (setup.py) +mypy-extensions==0.4.3 + # via mypy +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.3.0 + # via great-expectations +nodeenv==1.6.0 + # via pre-commit +numpy==1.21.6 + # via + # altair + # feast (setup.py) + # great-expectations + # pandas + # pandavro + # pyarrow + # scipy +oauthlib==3.2.0 + # via requests-oauthlib +oscrypto==1.3.0 + # via snowflake-connector-python +packaging==21.3 + # via + # dask + # deprecation + # google-api-core + # google-cloud-bigquery + # great-expectations + # pytest + # redis + # sphinx +pandas==1.3.5 + # via + # altair + # feast (setup.py) + # 
great-expectations + # pandavro + # snowflake-connector-python +pandavro==1.5.2 + # via feast (setup.py) +parso==0.8.3 + # via jedi +partd==1.2.0 + # via dask +pathspec==0.9.0 + # via black +pbr==5.8.1 + # via mock +pep517==0.12.0 + # via pip-tools +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pip-tools==6.6.0 + # via feast (setup.py) +platformdirs==2.5.1 + # via virtualenv +pluggy==1.0.0 + # via pytest +ply==3.11 + # via thriftpy2 +portalocker==2.4.0 + # via msal-extensions +pre-commit==2.18.1 + # via feast (setup.py) +prompt-toolkit==3.0.29 + # via ipython +proto-plus==1.19.6 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-datastore + # google-cloud-firestore +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # googleapis-common-protos + # grpcio-reflection + # grpcio-testing + # grpcio-tools + # mypy-protobuf + # proto-plus + # tensorflow-metadata +psutil==5.9.0 + # via feast (setup.py) +ptyprocess==0.7.0 + # via pexpect +pure-eval==0.2.2 + # via stack-data +py==1.11.0 + # via + # pytest + # pytest-forked +py-cpuinfo==8.0.0 + # via pytest-benchmark +py4j==0.10.9.3 + # via pyspark +pyarrow==6.0.1 + # via + # feast (setup.py) + # snowflake-connector-python +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pybindgen==0.22.1 + # via feast (setup.py) +pycodestyle==2.8.0 + # via flake8 +pycparser==2.21 + # via cffi +pycryptodomex==3.14.1 + # via snowflake-connector-python +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyflakes==2.4.0 + # via flake8 +pygments==2.11.2 + # via + # ipython + # sphinx +pyjwt[crypto]==2.3.0 + # via + # adal + # msal + # snowflake-connector-python +pyopenssl==21.0.0 + # via snowflake-connector-python +pyparsing==2.4.7 + # via + # great-expectations + # httplib2 + # packaging +pyrsistent==0.18.1 + # via jsonschema +pyspark==3.2.1 + # via feast (setup.py) +pytest==7.1.1 + # via + # feast (setup.py) + # pytest-benchmark + # pytest-cov + # pytest-forked + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # pytest-timeout + # pytest-xdist +pytest-benchmark==3.4.1 + # via feast (setup.py) +pytest-cov==3.0.0 + # via feast (setup.py) +pytest-forked==1.4.0 + # via pytest-xdist +pytest-lazy-fixture==0.6.3 + # via feast (setup.py) +pytest-mock==1.10.4 + # via feast (setup.py) +pytest-ordering==0.6 + # via feast (setup.py) +pytest-timeout==1.4.2 + # via feast (setup.py) +pytest-xdist==2.5.0 + # via feast (setup.py) +python-dateutil==2.8.2 + # via + # adal + # botocore + # google-cloud-bigquery + # great-expectations + # moto + # pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via + # babel + # google-api-core + # great-expectations + # moto + # pandas + # snowflake-connector-python + # trino +pytz-deprecation-shim==0.1.0.post0 + # via tzlocal +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # pre-commit + # uvicorn +redis==4.2.2 + # via feast (setup.py) +regex==2022.3.15 + # via black +requests==2.27.1 + # via + # adal + # adlfs + # azure-core + # azure-datalake-store + # cachecontrol + # docker + # gcsfs + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # moto + # msal + # msrest + # requests-oauthlib + # responses + # snowflake-connector-python + # sphinx + # trino +requests-oauthlib==1.3.1 + # via + # google-auth-oauthlib + # msrest +responses==0.20.0 + # via moto +rsa==4.8 + # via google-auth +ruamel-yaml==0.17.17 + # via 
great-expectations +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml +s3transfer==0.5.2 + # via boto3 +scipy==1.8.0 + # via great-expectations +six==1.16.0 + # via + # absl-py + # azure-core + # azure-identity + # google-api-core + # google-auth + # google-auth-httplib2 + # google-cloud-core + # google-resumable-media + # grpcio + # happybase + # mock + # msrestazure + # pandavro + # pyopenssl + # python-dateutil + # virtualenv +sniffio==1.2.0 + # via anyio +snowballstemmer==2.2.0 + # via sphinx +snowflake-connector-python[pandas]==2.7.6 + # via feast (setup.py) +sphinx==4.3.2 + # via + # feast (setup.py) + # sphinx-rtd-theme +sphinx-rtd-theme==1.0.0 + # via feast (setup.py) +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +stack-data==0.2.0 + # via ipython +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +termcolor==1.1.0 + # via great-expectations +testcontainers==3.5.3 + # via feast (setup.py) +thriftpy2==0.4.14 + # via happybase +toml==0.10.2 + # via + # black + # feast (setup.py) + # pre-commit +tomli==2.0.1 + # via + # coverage + # mypy + # pep517 + # pytest +toolz==0.11.2 + # via + # altair + # dask + # partd +tqdm==4.64.0 + # via + # feast (setup.py) + # great-expectations +traitlets==5.1.1 + # via + # ipython + # jupyter-core + # matplotlib-inline + # nbformat +trino==0.312.0 + # via feast (setup.py) +typed-ast==1.5.2 + # via black +types-protobuf==3.19.15 + # via + # feast (setup.py) + # mypy-protobuf +types-python-dateutil==2.8.10 + # via feast (setup.py) +types-pytz==2021.3.6 + # via feast (setup.py) +types-pyyaml==6.0.5 + # via feast (setup.py) +types-redis==4.1.19 + # via feast (setup.py) +types-requests==2.27.16 + # via feast (setup.py) +types-setuptools==57.4.12 + # via feast (setup.py) +types-tabulate==0.8.6 + # via feast (setup.py) +types-urllib3==1.26.11 + # via types-requests +typing-extensions==4.1.1 + # via + # azure-core + # great-expectations + # mypy + # pydantic +tzdata==2022.1 + # via pytz-deprecation-shim +tzlocal==4.2 + # via great-expectations +uritemplate==4.1.1 + # via google-api-python-client +urllib3==1.26.9 + # via + # botocore + # feast (setup.py) + # great-expectations + # minio + # requests + # responses +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +virtualenv==20.14.1 + # via pre-commit +watchgod==0.8.2 + # via uvicorn +wcwidth==0.2.5 + # via prompt-toolkit +websocket-client==1.3.2 + # via docker +websockets==10.2 + # via uvicorn +werkzeug==2.1.1 + # via moto +wheel==0.37.1 + # via pip-tools +wrapt==1.14.0 + # via + # deprecated + # testcontainers +xmltodict==0.12.0 + # via moto +yarl==1.7.2 + # via aiohttp +zipp==3.8.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt new file mode 100644 index 0000000000..1098331989 --- /dev/null +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -0,0 +1,169 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --output-file=requirements/py3.9-requirements.txt +# +absl-py==1.0.0 + # via 
tensorflow-metadata +anyio==3.5.0 + # via + # starlette + # watchgod +asgiref==3.5.0 + # via uvicorn +attrs==21.4.0 + # via jsonschema +cachetools==5.0.0 + # via google-auth +certifi==2021.10.8 + # via requests +charset-normalizer==2.0.12 + # via requests +click==8.1.2 + # via + # feast (setup.py) + # uvicorn +cloudpickle==2.0.0 + # via dask +colorama==0.4.4 + # via feast (setup.py) +dask==2022.1.1 + # via feast (setup.py) +dill==0.3.4 + # via feast (setup.py) +fastapi==0.75.1 + # via feast (setup.py) +fastavro==1.4.10 + # via + # feast (setup.py) + # pandavro +fsspec==2022.3.0 + # via dask +google-api-core==2.7.2 + # via feast (setup.py) +google-auth==2.6.5 + # via google-api-core +googleapis-common-protos==1.52.0 + # via + # feast (setup.py) + # google-api-core + # tensorflow-metadata +grpcio==1.44.0 + # via + # feast (setup.py) + # grpcio-reflection +grpcio-reflection==1.44.0 + # via feast (setup.py) +h11==0.13.0 + # via uvicorn +httptools==0.4.0 + # via uvicorn +idna==3.3 + # via + # anyio + # requests +jinja2==3.1.1 + # via feast (setup.py) +jsonschema==4.4.0 + # via feast (setup.py) +locket==0.2.1 + # via partd +markupsafe==2.1.1 + # via jinja2 +mmh3==3.0.0 + # via feast (setup.py) +numpy==1.21.6 + # via + # feast (setup.py) + # pandas + # pandavro + # pyarrow +packaging==21.3 + # via dask +pandas==1.4.2 + # via + # feast (setup.py) + # pandavro +pandavro==1.5.2 + # via feast (setup.py) +partd==1.2.0 + # via dask +proto-plus==1.19.6 + # via feast (setup.py) +protobuf==3.19.4 + # via + # feast (setup.py) + # google-api-core + # googleapis-common-protos + # grpcio-reflection + # proto-plus + # tensorflow-metadata +pyarrow==7.0.0 + # via feast (setup.py) +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 + # via google-auth +pydantic==1.9.0 + # via + # fastapi + # feast (setup.py) +pyparsing==3.0.8 + # via packaging +pyrsistent==0.18.1 + # via jsonschema +python-dateutil==2.8.2 + # via pandas +python-dotenv==0.20.0 + # via uvicorn +pytz==2022.1 + # via pandas +pyyaml==6.0 + # via + # dask + # feast (setup.py) + # uvicorn +requests==2.27.1 + # via google-api-core +rsa==4.8 + # via google-auth +six==1.16.0 + # via + # absl-py + # google-auth + # grpcio + # pandavro + # python-dateutil +sniffio==1.2.0 + # via anyio +starlette==0.17.1 + # via fastapi +tabulate==0.8.9 + # via feast (setup.py) +tenacity==8.0.1 + # via feast (setup.py) +tensorflow-metadata==1.7.0 + # via feast (setup.py) +toml==0.10.2 + # via feast (setup.py) +toolz==0.11.2 + # via + # dask + # partd +tqdm==4.64.0 + # via feast (setup.py) +typing-extensions==4.1.1 + # via pydantic +urllib3==1.26.9 + # via requests +uvicorn[standard]==0.17.6 + # via feast (setup.py) +uvloop==0.16.0 + # via uvicorn +watchgod==0.8.2 + # via uvicorn +websockets==10.2 + # via uvicorn diff --git a/sdk/python/setup.cfg b/sdk/python/setup.cfg index ae8fe14ba1..e2d707e272 100644 --- a/sdk/python/setup.cfg +++ b/sdk/python/setup.cfg @@ -1,10 +1,11 @@ [isort] +src_paths = feast,tests multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 -skip=feast/protos +skip=feast/protos,feast/embedded_go/lib known_first_party=feast,feast_serving_server,feast_core_server default_section=THIRDPARTY @@ -13,8 +14,9 @@ ignore = E203, E266, E501, W503 max-line-length = 88 max-complexity = 20 select = B,C,E,F,W,T4 -exclude = .git,__pycache__,docs/conf.py,dist,feast/protos +exclude = .git,__pycache__,docs/conf.py,dist,feast/protos,feast/embedded_go/lib [mypy] -files=feast,test +files=feast,tests 
ignore_missing_imports=true +exclude=feast/embedded_go/lib diff --git a/sdk/python/setup.py b/sdk/python/setup.py index bd51956160..15840f2d1a 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -11,25 +11,32 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import copy import glob +import json import os +import pathlib import re +import shutil import subprocess - +import sys from distutils.cmd import Command -from setuptools import find_packages +from distutils.dir_util import copy_tree +from pathlib import Path +from subprocess import CalledProcessError + +from setuptools import find_packages, Extension try: from setuptools import setup - from setuptools.command.install import install - from setuptools.command.develop import develop - from setuptools.command.egg_info import egg_info - from setuptools.command.sdist import sdist from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext as _build_ext + from setuptools.command.develop import develop + from setuptools.command.install import install except ImportError: - from distutils.core import setup - from distutils.command.install import install from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext as _build_ext + from distutils.core import setup NAME = "feast" DESCRIPTION = "Python SDK for Feast" @@ -38,89 +45,134 @@ REQUIRES_PYTHON = ">=3.7.0" REQUIRED = [ - "Click==7.*", + "click>=7.0.0", "colorama>=0.3.9", + "dill==0.3.*", "fastavro>=1.1.0", "google-api-core>=1.23.0", "googleapis-common-protos==1.52.*", "grpcio>=1.34.0", + "grpcio-reflection>=1.34.0", "Jinja2>=2.0.0", "jsonschema", "mmh3", + "numpy<1.22", # 1.22 drops support for python 3.7. 
"pandas>=1.0.0", "pandavro==1.5.*", - "protobuf>=3.10", - "pyarrow>=2.0.0", + "protobuf>=3.10,<3.20", + "proto-plus<1.19.7", + "pyarrow>=4.0.0", "pydantic>=1.0.0", - "PyYAML==5.3.*", + "PyYAML>=5.4.*", "tabulate==0.8.*", "tenacity>=7.*", "toml==0.10.*", "tqdm==4.*", + "fastapi>=0.68.0", + "uvicorn[standard]>=0.14.0", + "proto-plus<1.19.7", + "tensorflow-metadata>=1.0.0,<2.0.0", + "dask>=2021.*,<2022.02.0", ] GCP_REQUIRED = [ - "google-cloud-bigquery>=2.0.*", + "google-cloud-bigquery>=2,<3", "google-cloud-bigquery-storage >= 2.0.0", "google-cloud-datastore>=2.1.*", - "google-cloud-storage>=1.20.*", - "google-cloud-core==1.4.*", + "google-cloud-storage>=1.34.*,<1.41", + "google-cloud-core>=1.4.0,<2.0.0", ] REDIS_REQUIRED = [ - "redis-py-cluster==2.1.2", + "redis==4.2.2", + "hiredis>=2.0.0", ] AWS_REQUIRED = [ - "boto3==1.17.*", + "boto3>=1.17.0", + "docker>=5.0.2", ] -CI_REQUIRED = [ - "cryptography==3.3.2", - "flake8", - "black==19.10b0", - "isort>=5", - "grpcio-tools==1.34.0", - "grpcio-testing==1.34.0", - "mock==2.0.0", - "moto", - "mypy==0.790", - "mypy-protobuf==1.24", - "avro==1.10.0", - "gcsfs", - "urllib3>=1.25.4", - "pytest==6.0.0", - "pytest-cov", - "pytest-xdist", - "pytest-lazy-fixture==0.6.3", - "pytest-timeout==1.4.2", - "pytest-ordering==0.6.*", - "pytest-mock==1.10.4", - "Sphinx!=4.0.0", - "sphinx-rtd-theme", - "adlfs==0.5.9", - "firebase-admin==4.5.2", - "pre-commit", - "assertpy==1.1", - "google-cloud-bigquery>=2.0.*", - "google-cloud-bigquery-storage >= 2.0.0", - "google-cloud-datastore>=2.1.*", - "google-cloud-storage>=1.20.*", - "google-cloud-core==1.4.*", - "redis-py-cluster==2.1.2", - "boto3==1.17.*", +SNOWFLAKE_REQUIRED = [ + "snowflake-connector-python[pandas]>=2.7.3", ] +SPARK_REQUIRED = [ + "pyspark>=3.0.0", +] -# README file from Feast repo root directory -repo_root = ( - subprocess.Popen(["git", "rev-parse", "--show-toplevel"], stdout=subprocess.PIPE) - .communicate()[0] - .rstrip() - .decode("utf-8") +TRINO_REQUIRED = [ + "trino>=0.305.0,<0.400.0", +] + +GE_REQUIRED = [ + "great_expectations>=0.14.0,<0.15.0" +] + +GO_REQUIRED = [ + "cffi==1.15.*", +] + +CI_REQUIRED = ( + [ + "cryptography==3.4.8", + "flake8", + "black==19.10b0", + "isort>=5", + "grpcio-tools==1.44.0", + "grpcio-testing==1.44.0", + "minio==7.1.0", + "mock==2.0.0", + "moto", + "mypy==0.931", + "mypy-protobuf==3.1", + "avro==1.10.0", + "gcsfs", + "urllib3>=1.25.4", + "psutil==5.9.0", + "pytest>=6.0.0", + "pytest-cov", + "pytest-xdist", + "pytest-benchmark>=3.4.1", + "pytest-lazy-fixture==0.6.3", + "pytest-timeout==1.4.2", + "pytest-ordering==0.6.*", + "pytest-mock==1.10.4", + "Sphinx!=4.0.0,<4.4.0", + "sphinx-rtd-theme", + "testcontainers>=3.5", + "adlfs==0.5.9", + "firebase-admin==4.5.2", + "pre-commit", + "assertpy==1.1", + "pip-tools", + "pybindgen", + "types-protobuf", + "types-python-dateutil", + "types-pytz", + "types-PyYAML", + "types-redis", + "types-requests", + "types-setuptools", + "types-tabulate", + ] + + GCP_REQUIRED + + REDIS_REQUIRED + + AWS_REQUIRED + + SNOWFLAKE_REQUIRED + + SPARK_REQUIRED + + TRINO_REQUIRED + + GE_REQUIRED ) + +DEV_REQUIRED = ["mypy-protobuf==3.1", "grpcio-testing==1.*"] + CI_REQUIRED + +# Get git repo root directory +repo_root = str(pathlib.Path(__file__).resolve().parent.parent.parent) + +# README file from Feast repo root directory README_FILE = os.path.join(repo_root, "README.md") -with open(README_FILE, "r") as f: +with open(README_FILE, "r", encoding="utf8") as f: LONG_DESCRIPTION = f.read() # Add Support for parsing tags that have a prefix containing '/' 
(ie 'sdk/go') to setuptools_scm.
@@ -130,53 +182,172 @@
     r"^(?:[\/\w-]+)?(?P<version>[vV]?\d+(?:\.\d+){0,2}[^\+]*)(?:\+.*)?$"
 )
 
+# Only set use_scm_version if git executable exists (setting this variable causes pip to use git under the hood)
+if shutil.which("git"):
+    use_scm_version = {"root": "../..", "relative_to": __file__, "tag_regex": TAG_REGEX}
+else:
+    use_scm_version = None
 
-class BuildProtoCommand(Command):
-    description = "Builds the proto files into python files."
+PROTO_SUBDIRS = ["core", "serving", "types", "storage"]
+
+
+class BuildPythonProtosCommand(Command):
+    description = "Builds the proto files into Python files."
+    user_options = []
 
     def initialize_options(self):
-        self.protoc = ["python", "-m", "grpc_tools.protoc"]  # find_executable("protoc")
+        self.python_protoc = [
+            sys.executable,
+            "-m",
+            "grpc_tools.protoc",
+        ]  # find_executable("protoc")
         self.proto_folder = os.path.join(repo_root, "protos")
-        self.this_package = os.path.join(os.path.dirname(__file__) or os.getcwd(), 'feast/protos')
-        self.sub_folders = ["core", "serving", "types", "storage"]
+        self.python_folder = os.path.join(
+            os.path.dirname(__file__) or os.getcwd(), "feast/protos"
+        )
+        self.sub_folders = PROTO_SUBDIRS
 
     def finalize_options(self):
         pass
 
-    def _generate_protos(self, path):
+    def _generate_python_protos(self, path: str):
         proto_files = glob.glob(os.path.join(self.proto_folder, path))
-
-        subprocess.check_call(self.protoc + [
-            '-I', self.proto_folder,
-            '--python_out', self.this_package,
-            '--grpc_python_out', self.this_package,
-            '--mypy_out', self.this_package] + proto_files)
+        Path(self.python_folder).mkdir(exist_ok=True)
+        subprocess.check_call(
+            self.python_protoc
+            + [
+                "-I",
+                self.proto_folder,
+                "--python_out",
+                self.python_folder,
+                "--grpc_python_out",
+                self.python_folder,
+                "--mypy_out",
+                self.python_folder,
+            ]
+            + proto_files,
+        )
 
     def run(self):
         for sub_folder in self.sub_folders:
-            self._generate_protos(f'feast/{sub_folder}/*.proto')
+            self._generate_python_protos(f"feast/{sub_folder}/*.proto")
+            # We need the __init__ files for each of the generated subdirs
+            # so that they are regular packages, and don't need the `--namespace-packages` flags
+            # when being typechecked using mypy. BUT, we need to exclude `types` because that clashes
+            # with an existing module in the python standard library.
+ if sub_folder == "types": + continue + with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", 'w'): + pass - from pathlib import Path + with open(f"{self.python_folder}/__init__.py", 'w'): + pass + with open(f"{self.python_folder}/feast/__init__.py", 'w'): + pass - for path in Path('feast/protos').rglob('*.py'): + for path in Path("feast/protos").rglob("*.py"): for folder in self.sub_folders: # Read in the file - with open(path, 'r') as file: + with open(path, "r") as file: filedata = file.read() # Replace the target string - filedata = filedata.replace(f'from feast.{folder}', f'from feast.protos.feast.{folder}') + filedata = filedata.replace( + f"from feast.{folder}", f"from feast.protos.feast.{folder}" + ) # Write the file out again - with open(path, 'w') as file: + with open(path, "w") as file: file.write(filedata) +def _generate_path_with_gopath(): + go_path = subprocess.check_output(["go", "env", "GOPATH"]).decode("utf-8") + go_path = go_path.strip() + path_val = os.getenv("PATH") + path_val = f"{path_val}:{go_path}/bin" + + return path_val + + +def _ensure_go_and_proto_toolchain(): + try: + version = subprocess.check_output(["go", "version"]) + except Exception as e: + raise RuntimeError("Unable to find go toolchain") from e + + semver_string = re.search(r"go[\S]+", str(version)).group().lstrip("go") + parts = semver_string.split(".") + if not (int(parts[0]) >= 1 and int(parts[1]) >= 16): + raise RuntimeError(f"Go compiler too old; expected 1.16+ found {semver_string}") + + path_val = _generate_path_with_gopath() + + try: + subprocess.check_call(["protoc-gen-go", "--version"], env={ + "PATH": path_val + }) + subprocess.check_call(["protoc-gen-go-grpc", "--version"], env={ + "PATH": path_val + }) + except Exception as e: + raise RuntimeError("Unable to find go/grpc extensions for protoc") from e + + +class BuildGoProtosCommand(Command): + description = "Builds the proto files into Go files." 
+ user_options = [] + + def initialize_options(self): + self.go_protoc = [ + sys.executable, + "-m", + "grpc_tools.protoc", + ] # find_executable("protoc") + self.proto_folder = os.path.join(repo_root, "protos") + self.go_folder = os.path.join(repo_root, "go/protos") + self.sub_folders = PROTO_SUBDIRS + self.path_val = _generate_path_with_gopath() + + def finalize_options(self): + pass + + def _generate_go_protos(self, path: str): + proto_files = glob.glob(os.path.join(self.proto_folder, path)) + + try: + subprocess.check_call( + self.go_protoc + + ["-I", self.proto_folder, + "--go_out", self.go_folder, + "--go_opt=module=github.com/feast-dev/feast/go/protos", + "--go-grpc_out", self.go_folder, + "--go-grpc_opt=module=github.com/feast-dev/feast/go/protos"] + + proto_files, + env={ + "PATH": self.path_val + } + ) + except CalledProcessError as e: + print(f"Stderr: {e.stderr}") + print(f"Stdout: {e.stdout}") + + def run(self): + go_dir = Path(repo_root) / "go" / "protos" + go_dir.mkdir(exist_ok=True) + for sub_folder in self.sub_folders: + self._generate_go_protos(f"feast/{sub_folder}/*.proto") + + class BuildCommand(build_py): """Custom build command.""" def run(self): - self.run_command('build_proto') + self.run_command("build_python_protos") + if os.getenv("COMPILE_GO", "false").lower() == "true": + _ensure_go_and_proto_toolchain() + self.run_command("build_go_protos") + build_py.run(self) @@ -184,12 +355,76 @@ class DevelopCommand(develop): """Custom develop command.""" def run(self): - self.run_command('build_proto') + self.run_command("build_python_protos") + if os.getenv("COMPILE_GO", "false").lower() == "true": + _ensure_go_and_proto_toolchain() + self.run_command("build_go_protos") + develop.run(self) +class build_ext(_build_ext): + def finalize_options(self) -> None: + super().finalize_options() + if os.getenv("COMPILE_GO", "false").lower() == "false": + self.extensions = [e for e in self.extensions if not self._is_go_ext(e)] + + def _is_go_ext(self, ext: Extension): + return any(source.endswith('.go') or source.startswith('github') for source in ext.sources) + + def build_extension(self, ext: Extension): + if not self._is_go_ext(ext): + # the base class may mutate `self.compiler` + compiler = copy.deepcopy(self.compiler) + self.compiler, compiler = compiler, self.compiler + try: + return _build_ext.build_extension(self, ext) + finally: + self.compiler, compiler = compiler, self.compiler + + bin_path = _generate_path_with_gopath() + go_env = json.loads( + subprocess.check_output(["go", "env", "-json"]).decode("utf-8").strip() + ) + + destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name))) + subprocess.check_call([ + "gopy", + "build", + "-output", + destination, + "-vm", + sys.executable, + "-no-make", + *ext.sources + ], env={ + "PATH": bin_path, + "CGO_LDFLAGS_ALLOW": ".*", + **go_env, + }) + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + src = os.path.join(self.build_lib, package_dir) + # copy whole directory + copy_tree(src, package_dir) + +# Get version from version module. 
+my_path = os.path.abspath(os.path.dirname(__file__)) +path = os.path.join(my_path, "feast/version.py") +with open(path) as fp: + globals_dict = {} + exec(fp.read(), globals_dict) # pylint: disable=exec-used +__version__ = globals_dict["__version__"] + setup( name=NAME, + version=__version__, author=AUTHOR, description=DESCRIPTION, long_description=LONG_DESCRIPTION, @@ -201,11 +436,16 @@ def run(self): # https://stackoverflow.com/questions/28509965/setuptools-development-requirements # Install dev requirements with: pip install -e .[dev] extras_require={ - "dev": ["mypy-protobuf==1.*", "grpcio-testing==1.*"], + "dev": DEV_REQUIRED, "ci": CI_REQUIRED, "gcp": GCP_REQUIRED, "aws": AWS_REQUIRED, "redis": REDIS_REQUIRED, + "snowflake": SNOWFLAKE_REQUIRED, + "spark": SPARK_REQUIRED, + "trino": TRINO_REQUIRED, + "ge": GE_REQUIRED, + "go": GO_REQUIRED, }, include_package_data=True, license="Apache", @@ -218,20 +458,32 @@ def run(self): "Programming Language :: Python :: 3.7", ], entry_points={"console_scripts": ["feast=feast.cli:cli"]}, - use_scm_version={"root": "../..", "relative_to": __file__, "tag_regex": TAG_REGEX}, - setup_requires=["setuptools_scm", "grpcio", "grpcio-tools==1.34.0", "mypy-protobuf", "sphinx!=4.0.0"], + # use_scm_version=use_scm_version, + setup_requires=[ + "setuptools_scm", + "grpcio", + "grpcio-tools==1.44.0", + "mypy-protobuf==3.1", + "sphinx!=4.0.0", + ], package_data={ "": [ "protos/feast/**/*.proto", "protos/feast/third_party/grpc/health/v1/*.proto", - "protos/tensorflow_metadata/proto/v0/*.proto", "feast/protos/feast/**/*.py", - "tensorflow_metadata/proto/v0/*.py" ], }, cmdclass={ - "build_proto": BuildProtoCommand, + "build_python_protos": BuildPythonProtosCommand, + "build_go_protos": BuildGoProtosCommand, "build_py": BuildCommand, "develop": DevelopCommand, + "build_ext": build_ext, }, + # ext_modules=[ + # Extension( + # "feast.embedded_go.lib._embedded", + # ["github.com/feast-dev/feast/go/embedded"], + # ) + # ], ) diff --git a/sdk/python/tensorflow_metadata/proto/v0/path_pb2.py b/sdk/python/tensorflow_metadata/proto/v0/path_pb2.py deleted file mode 100644 index 4b6dec828c..0000000000 --- a/sdk/python/tensorflow_metadata/proto/v0/path_pb2.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-[… remaining lines of the deleted path_pb2.py ("""Generated protocol buffer
-code."""): protoc-generated FileDescriptor boilerplate and the Path message
-class, which has a single repeated string field, step …]
diff --git a/sdk/python/tensorflow_metadata/proto/v0/path_pb2.pyi b/sdk/python/tensorflow_metadata/proto/v0/path_pb2.pyi
deleted file mode 100644
index 82fccfa5fa..0000000000
--- a/sdk/python/tensorflow_metadata/proto/v0/path_pb2.pyi
+++ /dev/null
@@ -1,46 +0,0 @@
-[… all 46 lines of the deleted path_pb2.pyi type stub ("@generated by
-mypy-protobuf. Do not edit manually!"): typing declarations for the Path
-message and its step field …]
diff --git a/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.py b/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.py
deleted file mode 100644
index d3bfc50616..0000000000
--- a/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.py
+++ /dev/null
@@ -1,2331 +0,0 @@
-[… all 2,331 lines of the deleted, protoc-generated schema_pb2.py
-("# Generated by the protocol buffer compiler. DO NOT EDIT!"): the serialized
-FileDescriptor, the LifecycleStage and FeatureType enums, and descriptors for
-the Schema, Feature, Annotation, NumericValueComparator, DatasetConstraints,
-FixedShape, ValueCount, WeightedFeature, SparseFeature,
-DistributionConstraints, FeaturePresence, FeaturePresenceWithinGroup,
-InfinityNorm, FeatureComparator, TensorRepresentation, and
-TensorRepresentationGroup messages plus the *Domain messages (Int, Float,
-Struct, String, Bool, NaturalLanguage, Image, MID, URL, Time, TimeOfDay) …]
label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='shape', full_name='tensorflow.metadata.v0.TensorRepresentation.DenseTensor.shape', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='default_value', full_name='tensorflow.metadata.v0.TensorRepresentation.DenseTensor.default_value', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=5272, - serialized_end=5439, -) - -_TENSORREPRESENTATION_VARLENSPARSETENSOR = _descriptor.Descriptor( - name='VarLenSparseTensor', - full_name='tensorflow.metadata.v0.TensorRepresentation.VarLenSparseTensor', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='column_name', full_name='tensorflow.metadata.v0.TensorRepresentation.VarLenSparseTensor.column_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=5441, - serialized_end=5482, -) - -_TENSORREPRESENTATION_SPARSETENSOR = _descriptor.Descriptor( - name='SparseTensor', - full_name='tensorflow.metadata.v0.TensorRepresentation.SparseTensor', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='dense_shape', full_name='tensorflow.metadata.v0.TensorRepresentation.SparseTensor.dense_shape', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='index_column_names', full_name='tensorflow.metadata.v0.TensorRepresentation.SparseTensor.index_column_names', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value_column_name', 
full_name='tensorflow.metadata.v0.TensorRepresentation.SparseTensor.value_column_name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=5484, - serialized_end=5610, -) - -_TENSORREPRESENTATION = _descriptor.Descriptor( - name='TensorRepresentation', - full_name='tensorflow.metadata.v0.TensorRepresentation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='dense_tensor', full_name='tensorflow.metadata.v0.TensorRepresentation.dense_tensor', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='varlen_sparse_tensor', full_name='tensorflow.metadata.v0.TensorRepresentation.varlen_sparse_tensor', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='sparse_tensor', full_name='tensorflow.metadata.v0.TensorRepresentation.sparse_tensor', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_TENSORREPRESENTATION_DEFAULTVALUE, _TENSORREPRESENTATION_DENSETENSOR, _TENSORREPRESENTATION_VARLENSPARSETENSOR, _TENSORREPRESENTATION_SPARSETENSOR, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='kind', full_name='tensorflow.metadata.v0.TensorRepresentation.kind', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=4871, - serialized_end=5618, -) - - -_TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY = _descriptor.Descriptor( - name='TensorRepresentationEntry', - full_name='tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value', 
full_name='tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=5758, - serialized_end=5863, -) - -_TENSORREPRESENTATIONGROUP = _descriptor.Descriptor( - name='TensorRepresentationGroup', - full_name='tensorflow.metadata.v0.TensorRepresentationGroup', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='tensor_representation', full_name='tensorflow.metadata.v0.TensorRepresentationGroup.tensor_representation', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=5621, - serialized_end=5863, -) - -_SCHEMA_TENSORREPRESENTATIONGROUPENTRY.fields_by_name['value'].message_type = _TENSORREPRESENTATIONGROUP -_SCHEMA_TENSORREPRESENTATIONGROUPENTRY.containing_type = _SCHEMA -_SCHEMA.fields_by_name['feature'].message_type = _FEATURE -_SCHEMA.fields_by_name['sparse_feature'].message_type = _SPARSEFEATURE -_SCHEMA.fields_by_name['weighted_feature'].message_type = _WEIGHTEDFEATURE -_SCHEMA.fields_by_name['string_domain'].message_type = _STRINGDOMAIN -_SCHEMA.fields_by_name['float_domain'].message_type = _FLOATDOMAIN -_SCHEMA.fields_by_name['int_domain'].message_type = _INTDOMAIN -_SCHEMA.fields_by_name['annotation'].message_type = _ANNOTATION -_SCHEMA.fields_by_name['dataset_constraints'].message_type = _DATASETCONSTRAINTS -_SCHEMA.fields_by_name['tensor_representation_group'].message_type = _SCHEMA_TENSORREPRESENTATIONGROUPENTRY -_FEATURE.fields_by_name['presence'].message_type = _FEATUREPRESENCE -_FEATURE.fields_by_name['group_presence'].message_type = _FEATUREPRESENCEWITHINGROUP -_FEATURE.fields_by_name['shape'].message_type = _FIXEDSHAPE -_FEATURE.fields_by_name['value_count'].message_type = _VALUECOUNT -_FEATURE.fields_by_name['type'].enum_type = _FEATURETYPE -_FEATURE.fields_by_name['int_domain'].message_type = _INTDOMAIN -_FEATURE.fields_by_name['float_domain'].message_type = _FLOATDOMAIN -_FEATURE.fields_by_name['string_domain'].message_type = _STRINGDOMAIN -_FEATURE.fields_by_name['bool_domain'].message_type = _BOOLDOMAIN -_FEATURE.fields_by_name['struct_domain'].message_type = _STRUCTDOMAIN -_FEATURE.fields_by_name['natural_language_domain'].message_type = _NATURALLANGUAGEDOMAIN -_FEATURE.fields_by_name['image_domain'].message_type = _IMAGEDOMAIN -_FEATURE.fields_by_name['mid_domain'].message_type = _MIDDOMAIN -_FEATURE.fields_by_name['url_domain'].message_type = _URLDOMAIN -_FEATURE.fields_by_name['time_domain'].message_type = _TIMEDOMAIN -_FEATURE.fields_by_name['time_of_day_domain'].message_type = 
_TIMEOFDAYDOMAIN -_FEATURE.fields_by_name['distribution_constraints'].message_type = _DISTRIBUTIONCONSTRAINTS -_FEATURE.fields_by_name['annotation'].message_type = _ANNOTATION -_FEATURE.fields_by_name['skew_comparator'].message_type = _FEATURECOMPARATOR -_FEATURE.fields_by_name['drift_comparator'].message_type = _FEATURECOMPARATOR -_FEATURE.fields_by_name['lifecycle_stage'].enum_type = _LIFECYCLESTAGE -_FEATURE.oneofs_by_name['presence_constraints'].fields.append( - _FEATURE.fields_by_name['presence']) -_FEATURE.fields_by_name['presence'].containing_oneof = _FEATURE.oneofs_by_name['presence_constraints'] -_FEATURE.oneofs_by_name['presence_constraints'].fields.append( - _FEATURE.fields_by_name['group_presence']) -_FEATURE.fields_by_name['group_presence'].containing_oneof = _FEATURE.oneofs_by_name['presence_constraints'] -_FEATURE.oneofs_by_name['shape_type'].fields.append( - _FEATURE.fields_by_name['shape']) -_FEATURE.fields_by_name['shape'].containing_oneof = _FEATURE.oneofs_by_name['shape_type'] -_FEATURE.oneofs_by_name['shape_type'].fields.append( - _FEATURE.fields_by_name['value_count']) -_FEATURE.fields_by_name['value_count'].containing_oneof = _FEATURE.oneofs_by_name['shape_type'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['domain']) -_FEATURE.fields_by_name['domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['int_domain']) -_FEATURE.fields_by_name['int_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['float_domain']) -_FEATURE.fields_by_name['float_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['string_domain']) -_FEATURE.fields_by_name['string_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['bool_domain']) -_FEATURE.fields_by_name['bool_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['struct_domain']) -_FEATURE.fields_by_name['struct_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['natural_language_domain']) -_FEATURE.fields_by_name['natural_language_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['image_domain']) -_FEATURE.fields_by_name['image_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['mid_domain']) -_FEATURE.fields_by_name['mid_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['url_domain']) -_FEATURE.fields_by_name['url_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['time_domain']) -_FEATURE.fields_by_name['time_domain'].containing_oneof = _FEATURE.oneofs_by_name['domain_info'] -_FEATURE.oneofs_by_name['domain_info'].fields.append( - _FEATURE.fields_by_name['time_of_day_domain']) -_FEATURE.fields_by_name['time_of_day_domain'].containing_oneof = 
_FEATURE.oneofs_by_name['domain_info'] -_ANNOTATION.fields_by_name['extra_metadata'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_DATASETCONSTRAINTS.fields_by_name['num_examples_drift_comparator'].message_type = _NUMERICVALUECOMPARATOR -_DATASETCONSTRAINTS.fields_by_name['num_examples_version_comparator'].message_type = _NUMERICVALUECOMPARATOR -_FIXEDSHAPE_DIM.containing_type = _FIXEDSHAPE -_FIXEDSHAPE.fields_by_name['dim'].message_type = _FIXEDSHAPE_DIM -_WEIGHTEDFEATURE.fields_by_name['feature'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2._PATH -_WEIGHTEDFEATURE.fields_by_name['weight_feature'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2._PATH -_WEIGHTEDFEATURE.fields_by_name['lifecycle_stage'].enum_type = _LIFECYCLESTAGE -_SPARSEFEATURE_INDEXFEATURE.containing_type = _SPARSEFEATURE -_SPARSEFEATURE_VALUEFEATURE.containing_type = _SPARSEFEATURE -_SPARSEFEATURE.fields_by_name['lifecycle_stage'].enum_type = _LIFECYCLESTAGE -_SPARSEFEATURE.fields_by_name['presence'].message_type = _FEATUREPRESENCE -_SPARSEFEATURE.fields_by_name['dense_shape'].message_type = _FIXEDSHAPE -_SPARSEFEATURE.fields_by_name['index_feature'].message_type = _SPARSEFEATURE_INDEXFEATURE -_SPARSEFEATURE.fields_by_name['value_feature'].message_type = _SPARSEFEATURE_VALUEFEATURE -_SPARSEFEATURE.fields_by_name['type'].enum_type = _FEATURETYPE -_STRUCTDOMAIN.fields_by_name['feature'].message_type = _FEATURE -_STRUCTDOMAIN.fields_by_name['sparse_feature'].message_type = _SPARSEFEATURE -_TIMEDOMAIN.fields_by_name['integer_format'].enum_type = _TIMEDOMAIN_INTEGERTIMEFORMAT -_TIMEDOMAIN_INTEGERTIMEFORMAT.containing_type = _TIMEDOMAIN -_TIMEDOMAIN.oneofs_by_name['format'].fields.append( - _TIMEDOMAIN.fields_by_name['string_format']) -_TIMEDOMAIN.fields_by_name['string_format'].containing_oneof = _TIMEDOMAIN.oneofs_by_name['format'] -_TIMEDOMAIN.oneofs_by_name['format'].fields.append( - _TIMEDOMAIN.fields_by_name['integer_format']) -_TIMEDOMAIN.fields_by_name['integer_format'].containing_oneof = _TIMEDOMAIN.oneofs_by_name['format'] -_TIMEOFDAYDOMAIN.fields_by_name['integer_format'].enum_type = _TIMEOFDAYDOMAIN_INTEGERTIMEOFDAYFORMAT -_TIMEOFDAYDOMAIN_INTEGERTIMEOFDAYFORMAT.containing_type = _TIMEOFDAYDOMAIN -_TIMEOFDAYDOMAIN.oneofs_by_name['format'].fields.append( - _TIMEOFDAYDOMAIN.fields_by_name['string_format']) -_TIMEOFDAYDOMAIN.fields_by_name['string_format'].containing_oneof = _TIMEOFDAYDOMAIN.oneofs_by_name['format'] -_TIMEOFDAYDOMAIN.oneofs_by_name['format'].fields.append( - _TIMEOFDAYDOMAIN.fields_by_name['integer_format']) -_TIMEOFDAYDOMAIN.fields_by_name['integer_format'].containing_oneof = _TIMEOFDAYDOMAIN.oneofs_by_name['format'] -_FEATURECOMPARATOR.fields_by_name['infinity_norm'].message_type = _INFINITYNORM -_TENSORREPRESENTATION_DEFAULTVALUE.containing_type = _TENSORREPRESENTATION -_TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['float_value']) -_TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['float_value'].containing_oneof = _TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'] -_TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['int_value']) -_TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['int_value'].containing_oneof = _TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'] -_TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'].fields.append( - 
_TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['bytes_value']) -_TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['bytes_value'].containing_oneof = _TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'] -_TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['uint_value']) -_TENSORREPRESENTATION_DEFAULTVALUE.fields_by_name['uint_value'].containing_oneof = _TENSORREPRESENTATION_DEFAULTVALUE.oneofs_by_name['kind'] -_TENSORREPRESENTATION_DENSETENSOR.fields_by_name['shape'].message_type = _FIXEDSHAPE -_TENSORREPRESENTATION_DENSETENSOR.fields_by_name['default_value'].message_type = _TENSORREPRESENTATION_DEFAULTVALUE -_TENSORREPRESENTATION_DENSETENSOR.containing_type = _TENSORREPRESENTATION -_TENSORREPRESENTATION_VARLENSPARSETENSOR.containing_type = _TENSORREPRESENTATION -_TENSORREPRESENTATION_SPARSETENSOR.fields_by_name['dense_shape'].message_type = _FIXEDSHAPE -_TENSORREPRESENTATION_SPARSETENSOR.containing_type = _TENSORREPRESENTATION -_TENSORREPRESENTATION.fields_by_name['dense_tensor'].message_type = _TENSORREPRESENTATION_DENSETENSOR -_TENSORREPRESENTATION.fields_by_name['varlen_sparse_tensor'].message_type = _TENSORREPRESENTATION_VARLENSPARSETENSOR -_TENSORREPRESENTATION.fields_by_name['sparse_tensor'].message_type = _TENSORREPRESENTATION_SPARSETENSOR -_TENSORREPRESENTATION.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION.fields_by_name['dense_tensor']) -_TENSORREPRESENTATION.fields_by_name['dense_tensor'].containing_oneof = _TENSORREPRESENTATION.oneofs_by_name['kind'] -_TENSORREPRESENTATION.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION.fields_by_name['varlen_sparse_tensor']) -_TENSORREPRESENTATION.fields_by_name['varlen_sparse_tensor'].containing_oneof = _TENSORREPRESENTATION.oneofs_by_name['kind'] -_TENSORREPRESENTATION.oneofs_by_name['kind'].fields.append( - _TENSORREPRESENTATION.fields_by_name['sparse_tensor']) -_TENSORREPRESENTATION.fields_by_name['sparse_tensor'].containing_oneof = _TENSORREPRESENTATION.oneofs_by_name['kind'] -_TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY.fields_by_name['value'].message_type = _TENSORREPRESENTATION -_TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY.containing_type = _TENSORREPRESENTATIONGROUP -_TENSORREPRESENTATIONGROUP.fields_by_name['tensor_representation'].message_type = _TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY -DESCRIPTOR.message_types_by_name['Schema'] = _SCHEMA -DESCRIPTOR.message_types_by_name['Feature'] = _FEATURE -DESCRIPTOR.message_types_by_name['Annotation'] = _ANNOTATION -DESCRIPTOR.message_types_by_name['NumericValueComparator'] = _NUMERICVALUECOMPARATOR -DESCRIPTOR.message_types_by_name['DatasetConstraints'] = _DATASETCONSTRAINTS -DESCRIPTOR.message_types_by_name['FixedShape'] = _FIXEDSHAPE -DESCRIPTOR.message_types_by_name['ValueCount'] = _VALUECOUNT -DESCRIPTOR.message_types_by_name['WeightedFeature'] = _WEIGHTEDFEATURE -DESCRIPTOR.message_types_by_name['SparseFeature'] = _SPARSEFEATURE -DESCRIPTOR.message_types_by_name['DistributionConstraints'] = _DISTRIBUTIONCONSTRAINTS -DESCRIPTOR.message_types_by_name['IntDomain'] = _INTDOMAIN -DESCRIPTOR.message_types_by_name['FloatDomain'] = _FLOATDOMAIN -DESCRIPTOR.message_types_by_name['StructDomain'] = _STRUCTDOMAIN -DESCRIPTOR.message_types_by_name['StringDomain'] = _STRINGDOMAIN -DESCRIPTOR.message_types_by_name['BoolDomain'] = _BOOLDOMAIN -DESCRIPTOR.message_types_by_name['NaturalLanguageDomain'] = _NATURALLANGUAGEDOMAIN 
-DESCRIPTOR.message_types_by_name['ImageDomain'] = _IMAGEDOMAIN -DESCRIPTOR.message_types_by_name['MIDDomain'] = _MIDDOMAIN -DESCRIPTOR.message_types_by_name['URLDomain'] = _URLDOMAIN -DESCRIPTOR.message_types_by_name['TimeDomain'] = _TIMEDOMAIN -DESCRIPTOR.message_types_by_name['TimeOfDayDomain'] = _TIMEOFDAYDOMAIN -DESCRIPTOR.message_types_by_name['FeaturePresence'] = _FEATUREPRESENCE -DESCRIPTOR.message_types_by_name['FeaturePresenceWithinGroup'] = _FEATUREPRESENCEWITHINGROUP -DESCRIPTOR.message_types_by_name['InfinityNorm'] = _INFINITYNORM -DESCRIPTOR.message_types_by_name['FeatureComparator'] = _FEATURECOMPARATOR -DESCRIPTOR.message_types_by_name['TensorRepresentation'] = _TENSORREPRESENTATION -DESCRIPTOR.message_types_by_name['TensorRepresentationGroup'] = _TENSORREPRESENTATIONGROUP -DESCRIPTOR.enum_types_by_name['LifecycleStage'] = _LIFECYCLESTAGE -DESCRIPTOR.enum_types_by_name['FeatureType'] = _FEATURETYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Schema = _reflection.GeneratedProtocolMessageType('Schema', (_message.Message,), { - - 'TensorRepresentationGroupEntry' : _reflection.GeneratedProtocolMessageType('TensorRepresentationGroupEntry', (_message.Message,), { - 'DESCRIPTOR' : _SCHEMA_TENSORREPRESENTATIONGROUPENTRY, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Schema.TensorRepresentationGroupEntry) - }) - , - 'DESCRIPTOR' : _SCHEMA, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Schema) - }) -_sym_db.RegisterMessage(Schema) -_sym_db.RegisterMessage(Schema.TensorRepresentationGroupEntry) - -Feature = _reflection.GeneratedProtocolMessageType('Feature', (_message.Message,), { - 'DESCRIPTOR' : _FEATURE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Feature) - }) -_sym_db.RegisterMessage(Feature) - -Annotation = _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), { - 'DESCRIPTOR' : _ANNOTATION, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Annotation) - }) -_sym_db.RegisterMessage(Annotation) - -NumericValueComparator = _reflection.GeneratedProtocolMessageType('NumericValueComparator', (_message.Message,), { - 'DESCRIPTOR' : _NUMERICVALUECOMPARATOR, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NumericValueComparator) - }) -_sym_db.RegisterMessage(NumericValueComparator) - -DatasetConstraints = _reflection.GeneratedProtocolMessageType('DatasetConstraints', (_message.Message,), { - 'DESCRIPTOR' : _DATASETCONSTRAINTS, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.DatasetConstraints) - }) -_sym_db.RegisterMessage(DatasetConstraints) - -FixedShape = _reflection.GeneratedProtocolMessageType('FixedShape', (_message.Message,), { - - 'Dim' : _reflection.GeneratedProtocolMessageType('Dim', (_message.Message,), { - 'DESCRIPTOR' : _FIXEDSHAPE_DIM, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FixedShape.Dim) - }) - , - 'DESCRIPTOR' : _FIXEDSHAPE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FixedShape) - }) -_sym_db.RegisterMessage(FixedShape) 
-_sym_db.RegisterMessage(FixedShape.Dim) - -ValueCount = _reflection.GeneratedProtocolMessageType('ValueCount', (_message.Message,), { - 'DESCRIPTOR' : _VALUECOUNT, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.ValueCount) - }) -_sym_db.RegisterMessage(ValueCount) - -WeightedFeature = _reflection.GeneratedProtocolMessageType('WeightedFeature', (_message.Message,), { - 'DESCRIPTOR' : _WEIGHTEDFEATURE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.WeightedFeature) - }) -_sym_db.RegisterMessage(WeightedFeature) - -SparseFeature = _reflection.GeneratedProtocolMessageType('SparseFeature', (_message.Message,), { - - 'IndexFeature' : _reflection.GeneratedProtocolMessageType('IndexFeature', (_message.Message,), { - 'DESCRIPTOR' : _SPARSEFEATURE_INDEXFEATURE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.SparseFeature.IndexFeature) - }) - , - - 'ValueFeature' : _reflection.GeneratedProtocolMessageType('ValueFeature', (_message.Message,), { - 'DESCRIPTOR' : _SPARSEFEATURE_VALUEFEATURE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.SparseFeature.ValueFeature) - }) - , - 'DESCRIPTOR' : _SPARSEFEATURE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.SparseFeature) - }) -_sym_db.RegisterMessage(SparseFeature) -_sym_db.RegisterMessage(SparseFeature.IndexFeature) -_sym_db.RegisterMessage(SparseFeature.ValueFeature) - -DistributionConstraints = _reflection.GeneratedProtocolMessageType('DistributionConstraints', (_message.Message,), { - 'DESCRIPTOR' : _DISTRIBUTIONCONSTRAINTS, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.DistributionConstraints) - }) -_sym_db.RegisterMessage(DistributionConstraints) - -IntDomain = _reflection.GeneratedProtocolMessageType('IntDomain', (_message.Message,), { - 'DESCRIPTOR' : _INTDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.IntDomain) - }) -_sym_db.RegisterMessage(IntDomain) - -FloatDomain = _reflection.GeneratedProtocolMessageType('FloatDomain', (_message.Message,), { - 'DESCRIPTOR' : _FLOATDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FloatDomain) - }) -_sym_db.RegisterMessage(FloatDomain) - -StructDomain = _reflection.GeneratedProtocolMessageType('StructDomain', (_message.Message,), { - 'DESCRIPTOR' : _STRUCTDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.StructDomain) - }) -_sym_db.RegisterMessage(StructDomain) - -StringDomain = _reflection.GeneratedProtocolMessageType('StringDomain', (_message.Message,), { - 'DESCRIPTOR' : _STRINGDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.StringDomain) - }) -_sym_db.RegisterMessage(StringDomain) - -BoolDomain = _reflection.GeneratedProtocolMessageType('BoolDomain', (_message.Message,), { - 'DESCRIPTOR' : _BOOLDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.BoolDomain) - 
}) -_sym_db.RegisterMessage(BoolDomain) - -NaturalLanguageDomain = _reflection.GeneratedProtocolMessageType('NaturalLanguageDomain', (_message.Message,), { - 'DESCRIPTOR' : _NATURALLANGUAGEDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NaturalLanguageDomain) - }) -_sym_db.RegisterMessage(NaturalLanguageDomain) - -ImageDomain = _reflection.GeneratedProtocolMessageType('ImageDomain', (_message.Message,), { - 'DESCRIPTOR' : _IMAGEDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.ImageDomain) - }) -_sym_db.RegisterMessage(ImageDomain) - -MIDDomain = _reflection.GeneratedProtocolMessageType('MIDDomain', (_message.Message,), { - 'DESCRIPTOR' : _MIDDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.MIDDomain) - }) -_sym_db.RegisterMessage(MIDDomain) - -URLDomain = _reflection.GeneratedProtocolMessageType('URLDomain', (_message.Message,), { - 'DESCRIPTOR' : _URLDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.URLDomain) - }) -_sym_db.RegisterMessage(URLDomain) - -TimeDomain = _reflection.GeneratedProtocolMessageType('TimeDomain', (_message.Message,), { - 'DESCRIPTOR' : _TIMEDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TimeDomain) - }) -_sym_db.RegisterMessage(TimeDomain) - -TimeOfDayDomain = _reflection.GeneratedProtocolMessageType('TimeOfDayDomain', (_message.Message,), { - 'DESCRIPTOR' : _TIMEOFDAYDOMAIN, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TimeOfDayDomain) - }) -_sym_db.RegisterMessage(TimeOfDayDomain) - -FeaturePresence = _reflection.GeneratedProtocolMessageType('FeaturePresence', (_message.Message,), { - 'DESCRIPTOR' : _FEATUREPRESENCE, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FeaturePresence) - }) -_sym_db.RegisterMessage(FeaturePresence) - -FeaturePresenceWithinGroup = _reflection.GeneratedProtocolMessageType('FeaturePresenceWithinGroup', (_message.Message,), { - 'DESCRIPTOR' : _FEATUREPRESENCEWITHINGROUP, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FeaturePresenceWithinGroup) - }) -_sym_db.RegisterMessage(FeaturePresenceWithinGroup) - -InfinityNorm = _reflection.GeneratedProtocolMessageType('InfinityNorm', (_message.Message,), { - 'DESCRIPTOR' : _INFINITYNORM, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.InfinityNorm) - }) -_sym_db.RegisterMessage(InfinityNorm) - -FeatureComparator = _reflection.GeneratedProtocolMessageType('FeatureComparator', (_message.Message,), { - 'DESCRIPTOR' : _FEATURECOMPARATOR, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FeatureComparator) - }) -_sym_db.RegisterMessage(FeatureComparator) - -TensorRepresentation = _reflection.GeneratedProtocolMessageType('TensorRepresentation', (_message.Message,), { - - 'DefaultValue' : _reflection.GeneratedProtocolMessageType('DefaultValue', (_message.Message,), { - 'DESCRIPTOR' : _TENSORREPRESENTATION_DEFAULTVALUE, - 
'__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentation.DefaultValue) - }) - , - - 'DenseTensor' : _reflection.GeneratedProtocolMessageType('DenseTensor', (_message.Message,), { - 'DESCRIPTOR' : _TENSORREPRESENTATION_DENSETENSOR, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentation.DenseTensor) - }) - , - - 'VarLenSparseTensor' : _reflection.GeneratedProtocolMessageType('VarLenSparseTensor', (_message.Message,), { - 'DESCRIPTOR' : _TENSORREPRESENTATION_VARLENSPARSETENSOR, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentation.VarLenSparseTensor) - }) - , - - 'SparseTensor' : _reflection.GeneratedProtocolMessageType('SparseTensor', (_message.Message,), { - 'DESCRIPTOR' : _TENSORREPRESENTATION_SPARSETENSOR, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentation.SparseTensor) - }) - , - 'DESCRIPTOR' : _TENSORREPRESENTATION, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentation) - }) -_sym_db.RegisterMessage(TensorRepresentation) -_sym_db.RegisterMessage(TensorRepresentation.DefaultValue) -_sym_db.RegisterMessage(TensorRepresentation.DenseTensor) -_sym_db.RegisterMessage(TensorRepresentation.VarLenSparseTensor) -_sym_db.RegisterMessage(TensorRepresentation.SparseTensor) - -TensorRepresentationGroup = _reflection.GeneratedProtocolMessageType('TensorRepresentationGroup', (_message.Message,), { - - 'TensorRepresentationEntry' : _reflection.GeneratedProtocolMessageType('TensorRepresentationEntry', (_message.Message,), { - 'DESCRIPTOR' : _TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentationGroup.TensorRepresentationEntry) - }) - , - 'DESCRIPTOR' : _TENSORREPRESENTATIONGROUP, - '__module__' : 'tensorflow_metadata.proto.v0.schema_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.TensorRepresentationGroup) - }) -_sym_db.RegisterMessage(TensorRepresentationGroup) -_sym_db.RegisterMessage(TensorRepresentationGroup.TensorRepresentationEntry) - - -DESCRIPTOR._options = None -_SCHEMA_TENSORREPRESENTATIONGROUPENTRY._options = None -_FEATURE.fields_by_name['deprecated']._options = None -_SPARSEFEATURE.fields_by_name['deprecated']._options = None -_SPARSEFEATURE.fields_by_name['presence']._options = None -_SPARSEFEATURE.fields_by_name['type']._options = None -_TENSORREPRESENTATIONGROUP_TENSORREPRESENTATIONENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.pyi b/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.pyi deleted file mode 100644 index f08b330a2f..0000000000 --- a/sdk/python/tensorflow_metadata/proto/v0/schema_pb2.pyi +++ /dev/null @@ -1,786 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" -from google.protobuf.any_pb2 import ( - Any as google___protobuf___any_pb2___Any, -) - -from google.protobuf.descriptor import ( - Descriptor as google___protobuf___descriptor___Descriptor, - EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, - FileDescriptor as google___protobuf___descriptor___FileDescriptor, -) - -from google.protobuf.internal.containers import ( - MessageMap as google___protobuf___internal___containers___MessageMap, - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.internal.enum_type_wrapper import ( - _EnumTypeWrapper as google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from tensorflow_metadata.proto.v0.path_pb2 import ( - Path as tensorflow_metadata___proto___v0___path_pb2___Path, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - NewType as typing___NewType, - Optional as typing___Optional, - Text as typing___Text, - cast as typing___cast, - overload as typing___overload, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -builtin___bool = bool -builtin___bytes = bytes -builtin___float = float -builtin___int = int - - -DESCRIPTOR: google___protobuf___descriptor___FileDescriptor = ... - -LifecycleStageValue = typing___NewType('LifecycleStageValue', builtin___int) -type___LifecycleStageValue = LifecycleStageValue -LifecycleStage: _LifecycleStage -class _LifecycleStage(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[LifecycleStageValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - UNKNOWN_STAGE = typing___cast(LifecycleStageValue, 0) - PLANNED = typing___cast(LifecycleStageValue, 1) - ALPHA = typing___cast(LifecycleStageValue, 2) - BETA = typing___cast(LifecycleStageValue, 3) - PRODUCTION = typing___cast(LifecycleStageValue, 4) - DEPRECATED = typing___cast(LifecycleStageValue, 5) - DEBUG_ONLY = typing___cast(LifecycleStageValue, 6) -UNKNOWN_STAGE = typing___cast(LifecycleStageValue, 0) -PLANNED = typing___cast(LifecycleStageValue, 1) -ALPHA = typing___cast(LifecycleStageValue, 2) -BETA = typing___cast(LifecycleStageValue, 3) -PRODUCTION = typing___cast(LifecycleStageValue, 4) -DEPRECATED = typing___cast(LifecycleStageValue, 5) -DEBUG_ONLY = typing___cast(LifecycleStageValue, 6) - -FeatureTypeValue = typing___NewType('FeatureTypeValue', builtin___int) -type___FeatureTypeValue = FeatureTypeValue -FeatureType: _FeatureType -class _FeatureType(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[FeatureTypeValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - TYPE_UNKNOWN = typing___cast(FeatureTypeValue, 0) - BYTES = typing___cast(FeatureTypeValue, 1) - INT = typing___cast(FeatureTypeValue, 2) - FLOAT = typing___cast(FeatureTypeValue, 3) - STRUCT = typing___cast(FeatureTypeValue, 4) -TYPE_UNKNOWN = typing___cast(FeatureTypeValue, 0) -BYTES = typing___cast(FeatureTypeValue, 1) -INT = typing___cast(FeatureTypeValue, 2) -FLOAT = typing___cast(FeatureTypeValue, 3) -STRUCT = typing___cast(FeatureTypeValue, 4) - -class Schema(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
- class TensorRepresentationGroupEntry(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - key: typing___Text = ... - - @property - def value(self) -> type___TensorRepresentationGroup: ... - - def __init__(self, - *, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[type___TensorRepresentationGroup] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... - type___TensorRepresentationGroupEntry = TensorRepresentationGroupEntry - - default_environment: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - - @property - def feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Feature]: ... - - @property - def sparse_feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___SparseFeature]: ... - - @property - def weighted_feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___WeightedFeature]: ... - - @property - def string_domain(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___StringDomain]: ... - - @property - def float_domain(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___FloatDomain]: ... - - @property - def int_domain(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___IntDomain]: ... - - @property - def annotation(self) -> type___Annotation: ... - - @property - def dataset_constraints(self) -> type___DatasetConstraints: ... - - @property - def tensor_representation_group(self) -> google___protobuf___internal___containers___MessageMap[typing___Text, type___TensorRepresentationGroup]: ... - - def __init__(self, - *, - feature : typing___Optional[typing___Iterable[type___Feature]] = None, - sparse_feature : typing___Optional[typing___Iterable[type___SparseFeature]] = None, - weighted_feature : typing___Optional[typing___Iterable[type___WeightedFeature]] = None, - string_domain : typing___Optional[typing___Iterable[type___StringDomain]] = None, - float_domain : typing___Optional[typing___Iterable[type___FloatDomain]] = None, - int_domain : typing___Optional[typing___Iterable[type___IntDomain]] = None, - default_environment : typing___Optional[typing___Iterable[typing___Text]] = None, - annotation : typing___Optional[type___Annotation] = None, - dataset_constraints : typing___Optional[type___DatasetConstraints] = None, - tensor_representation_group : typing___Optional[typing___Mapping[typing___Text, type___TensorRepresentationGroup]] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"annotation",b"annotation",u"dataset_constraints",b"dataset_constraints"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"annotation",b"annotation",u"dataset_constraints",b"dataset_constraints",u"default_environment",b"default_environment",u"feature",b"feature",u"float_domain",b"float_domain",u"int_domain",b"int_domain",u"sparse_feature",b"sparse_feature",u"string_domain",b"string_domain",u"tensor_representation_group",b"tensor_representation_group",u"weighted_feature",b"weighted_feature"]) -> None: ... 
-type___Schema = Schema - -class Feature(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - deprecated: builtin___bool = ... - type: type___FeatureTypeValue = ... - domain: typing___Text = ... - in_environment: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - not_in_environment: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - lifecycle_stage: type___LifecycleStageValue = ... - - @property - def presence(self) -> type___FeaturePresence: ... - - @property - def group_presence(self) -> type___FeaturePresenceWithinGroup: ... - - @property - def shape(self) -> type___FixedShape: ... - - @property - def value_count(self) -> type___ValueCount: ... - - @property - def int_domain(self) -> type___IntDomain: ... - - @property - def float_domain(self) -> type___FloatDomain: ... - - @property - def string_domain(self) -> type___StringDomain: ... - - @property - def bool_domain(self) -> type___BoolDomain: ... - - @property - def struct_domain(self) -> type___StructDomain: ... - - @property - def natural_language_domain(self) -> type___NaturalLanguageDomain: ... - - @property - def image_domain(self) -> type___ImageDomain: ... - - @property - def mid_domain(self) -> type___MIDDomain: ... - - @property - def url_domain(self) -> type___URLDomain: ... - - @property - def time_domain(self) -> type___TimeDomain: ... - - @property - def time_of_day_domain(self) -> type___TimeOfDayDomain: ... - - @property - def distribution_constraints(self) -> type___DistributionConstraints: ... - - @property - def annotation(self) -> type___Annotation: ... - - @property - def skew_comparator(self) -> type___FeatureComparator: ... - - @property - def drift_comparator(self) -> type___FeatureComparator: ... 
- - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - deprecated : typing___Optional[builtin___bool] = None, - presence : typing___Optional[type___FeaturePresence] = None, - group_presence : typing___Optional[type___FeaturePresenceWithinGroup] = None, - shape : typing___Optional[type___FixedShape] = None, - value_count : typing___Optional[type___ValueCount] = None, - type : typing___Optional[type___FeatureTypeValue] = None, - domain : typing___Optional[typing___Text] = None, - int_domain : typing___Optional[type___IntDomain] = None, - float_domain : typing___Optional[type___FloatDomain] = None, - string_domain : typing___Optional[type___StringDomain] = None, - bool_domain : typing___Optional[type___BoolDomain] = None, - struct_domain : typing___Optional[type___StructDomain] = None, - natural_language_domain : typing___Optional[type___NaturalLanguageDomain] = None, - image_domain : typing___Optional[type___ImageDomain] = None, - mid_domain : typing___Optional[type___MIDDomain] = None, - url_domain : typing___Optional[type___URLDomain] = None, - time_domain : typing___Optional[type___TimeDomain] = None, - time_of_day_domain : typing___Optional[type___TimeOfDayDomain] = None, - distribution_constraints : typing___Optional[type___DistributionConstraints] = None, - annotation : typing___Optional[type___Annotation] = None, - skew_comparator : typing___Optional[type___FeatureComparator] = None, - drift_comparator : typing___Optional[type___FeatureComparator] = None, - in_environment : typing___Optional[typing___Iterable[typing___Text]] = None, - not_in_environment : typing___Optional[typing___Iterable[typing___Text]] = None, - lifecycle_stage : typing___Optional[type___LifecycleStageValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"annotation",b"annotation",u"bool_domain",b"bool_domain",u"deprecated",b"deprecated",u"distribution_constraints",b"distribution_constraints",u"domain",b"domain",u"domain_info",b"domain_info",u"drift_comparator",b"drift_comparator",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"int_domain",b"int_domain",u"lifecycle_stage",b"lifecycle_stage",u"mid_domain",b"mid_domain",u"name",b"name",u"natural_language_domain",b"natural_language_domain",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"skew_comparator",b"skew_comparator",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"type",b"type",u"url_domain",b"url_domain",u"value_count",b"value_count"]) -> builtin___bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"annotation",b"annotation",u"bool_domain",b"bool_domain",u"deprecated",b"deprecated",u"distribution_constraints",b"distribution_constraints",u"domain",b"domain",u"domain_info",b"domain_info",u"drift_comparator",b"drift_comparator",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"in_environment",b"in_environment",u"int_domain",b"int_domain",u"lifecycle_stage",b"lifecycle_stage",u"mid_domain",b"mid_domain",u"name",b"name",u"natural_language_domain",b"natural_language_domain",u"not_in_environment",b"not_in_environment",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"skew_comparator",b"skew_comparator",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"type",b"type",u"url_domain",b"url_domain",u"value_count",b"value_count"]) -> None: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"domain_info",b"domain_info"]) -> typing_extensions___Literal["domain","int_domain","float_domain","string_domain","bool_domain","struct_domain","natural_language_domain","image_domain","mid_domain","url_domain","time_domain","time_of_day_domain"]: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"presence_constraints",b"presence_constraints"]) -> typing_extensions___Literal["presence","group_presence"]: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"shape_type",b"shape_type"]) -> typing_extensions___Literal["shape","value_count"]: ... -type___Feature = Feature - -class Annotation(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - tag: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - comment: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - - @property - def extra_metadata(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[google___protobuf___any_pb2___Any]: ... - - def __init__(self, - *, - tag : typing___Optional[typing___Iterable[typing___Text]] = None, - comment : typing___Optional[typing___Iterable[typing___Text]] = None, - extra_metadata : typing___Optional[typing___Iterable[google___protobuf___any_pb2___Any]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"comment",b"comment",u"extra_metadata",b"extra_metadata",u"tag",b"tag"]) -> None: ... -type___Annotation = Annotation - -class NumericValueComparator(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - min_fraction_threshold: builtin___float = ... - max_fraction_threshold: builtin___float = ... - - def __init__(self, - *, - min_fraction_threshold : typing___Optional[builtin___float] = None, - max_fraction_threshold : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"max_fraction_threshold",b"max_fraction_threshold",u"min_fraction_threshold",b"min_fraction_threshold"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"max_fraction_threshold",b"max_fraction_threshold",u"min_fraction_threshold",b"min_fraction_threshold"]) -> None: ... 
-type___NumericValueComparator = NumericValueComparator - -class DatasetConstraints(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - min_examples_count: builtin___int = ... - - @property - def num_examples_drift_comparator(self) -> type___NumericValueComparator: ... - - @property - def num_examples_version_comparator(self) -> type___NumericValueComparator: ... - - def __init__(self, - *, - num_examples_drift_comparator : typing___Optional[type___NumericValueComparator] = None, - num_examples_version_comparator : typing___Optional[type___NumericValueComparator] = None, - min_examples_count : typing___Optional[builtin___int] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"min_examples_count",b"min_examples_count",u"num_examples_drift_comparator",b"num_examples_drift_comparator",u"num_examples_version_comparator",b"num_examples_version_comparator"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"min_examples_count",b"min_examples_count",u"num_examples_drift_comparator",b"num_examples_drift_comparator",u"num_examples_version_comparator",b"num_examples_version_comparator"]) -> None: ... -type___DatasetConstraints = DatasetConstraints - -class FixedShape(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Dim(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - size: builtin___int = ... - name: typing___Text = ... - - def __init__(self, - *, - size : typing___Optional[builtin___int] = None, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"name",b"name",u"size",b"size"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"size",b"size"]) -> None: ... - type___Dim = Dim - - - @property - def dim(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___FixedShape.Dim]: ... - - def __init__(self, - *, - dim : typing___Optional[typing___Iterable[type___FixedShape.Dim]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"dim",b"dim"]) -> None: ... -type___FixedShape = FixedShape - -class ValueCount(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - min: builtin___int = ... - max: builtin___int = ... - - def __init__(self, - *, - min : typing___Optional[builtin___int] = None, - max : typing___Optional[builtin___int] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"max",b"max",u"min",b"min"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"max",b"max",u"min",b"min"]) -> None: ... -type___ValueCount = ValueCount - -class WeightedFeature(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - lifecycle_stage: type___LifecycleStageValue = ... - - @property - def feature(self) -> tensorflow_metadata___proto___v0___path_pb2___Path: ... - - @property - def weight_feature(self) -> tensorflow_metadata___proto___v0___path_pb2___Path: ... 
- - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - feature : typing___Optional[tensorflow_metadata___proto___v0___path_pb2___Path] = None, - weight_feature : typing___Optional[tensorflow_metadata___proto___v0___path_pb2___Path] = None, - lifecycle_stage : typing___Optional[type___LifecycleStageValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"feature",b"feature",u"lifecycle_stage",b"lifecycle_stage",u"name",b"name",u"weight_feature",b"weight_feature"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"feature",b"feature",u"lifecycle_stage",b"lifecycle_stage",u"name",b"name",u"weight_feature",b"weight_feature"]) -> None: ... -type___WeightedFeature = WeightedFeature - -class SparseFeature(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class IndexFeature(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> None: ... - type___IndexFeature = IndexFeature - - class ValueFeature(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> None: ... - type___ValueFeature = ValueFeature - - name: typing___Text = ... - deprecated: builtin___bool = ... - lifecycle_stage: type___LifecycleStageValue = ... - is_sorted: builtin___bool = ... - type: type___FeatureTypeValue = ... - - @property - def presence(self) -> type___FeaturePresence: ... - - @property - def dense_shape(self) -> type___FixedShape: ... - - @property - def index_feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___SparseFeature.IndexFeature]: ... - - @property - def value_feature(self) -> type___SparseFeature.ValueFeature: ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - deprecated : typing___Optional[builtin___bool] = None, - lifecycle_stage : typing___Optional[type___LifecycleStageValue] = None, - presence : typing___Optional[type___FeaturePresence] = None, - dense_shape : typing___Optional[type___FixedShape] = None, - index_feature : typing___Optional[typing___Iterable[type___SparseFeature.IndexFeature]] = None, - is_sorted : typing___Optional[builtin___bool] = None, - value_feature : typing___Optional[type___SparseFeature.ValueFeature] = None, - type : typing___Optional[type___FeatureTypeValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"dense_shape",b"dense_shape",u"deprecated",b"deprecated",u"is_sorted",b"is_sorted",u"lifecycle_stage",b"lifecycle_stage",u"name",b"name",u"presence",b"presence",u"type",b"type",u"value_feature",b"value_feature"]) -> builtin___bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"dense_shape",b"dense_shape",u"deprecated",b"deprecated",u"index_feature",b"index_feature",u"is_sorted",b"is_sorted",u"lifecycle_stage",b"lifecycle_stage",u"name",b"name",u"presence",b"presence",u"type",b"type",u"value_feature",b"value_feature"]) -> None: ... -type___SparseFeature = SparseFeature - -class DistributionConstraints(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - min_domain_mass: builtin___float = ... - - def __init__(self, - *, - min_domain_mass : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"min_domain_mass",b"min_domain_mass"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"min_domain_mass",b"min_domain_mass"]) -> None: ... -type___DistributionConstraints = DistributionConstraints - -class IntDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - min: builtin___int = ... - max: builtin___int = ... - is_categorical: builtin___bool = ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - min : typing___Optional[builtin___int] = None, - max : typing___Optional[builtin___int] = None, - is_categorical : typing___Optional[builtin___bool] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"is_categorical",b"is_categorical",u"max",b"max",u"min",b"min",u"name",b"name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"is_categorical",b"is_categorical",u"max",b"max",u"min",b"min",u"name",b"name"]) -> None: ... -type___IntDomain = IntDomain - -class FloatDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - min: builtin___float = ... - max: builtin___float = ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - min : typing___Optional[builtin___float] = None, - max : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"max",b"max",u"min",b"min",u"name",b"name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"max",b"max",u"min",b"min",u"name",b"name"]) -> None: ... -type___FloatDomain = FloatDomain - -class StructDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Feature]: ... - - @property - def sparse_feature(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___SparseFeature]: ... - - def __init__(self, - *, - feature : typing___Optional[typing___Iterable[type___Feature]] = None, - sparse_feature : typing___Optional[typing___Iterable[type___SparseFeature]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"feature",b"feature",u"sparse_feature",b"sparse_feature"]) -> None: ... -type___StructDomain = StructDomain - -class StringDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - value: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... 
- - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value",b"value"]) -> None: ... -type___StringDomain = StringDomain - -class BoolDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - true_value: typing___Text = ... - false_value: typing___Text = ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - true_value : typing___Optional[typing___Text] = None, - false_value : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"false_value",b"false_value",u"name",b"name",u"true_value",b"true_value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"false_value",b"false_value",u"name",b"name",u"true_value",b"true_value"]) -> None: ... -type___BoolDomain = BoolDomain - -class NaturalLanguageDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__(self, - ) -> None: ... -type___NaturalLanguageDomain = NaturalLanguageDomain - -class ImageDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__(self, - ) -> None: ... -type___ImageDomain = ImageDomain - -class MIDDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__(self, - ) -> None: ... -type___MIDDomain = MIDDomain - -class URLDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__(self, - ) -> None: ... -type___URLDomain = URLDomain - -class TimeDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - IntegerTimeFormatValue = typing___NewType('IntegerTimeFormatValue', builtin___int) - type___IntegerTimeFormatValue = IntegerTimeFormatValue - IntegerTimeFormat: _IntegerTimeFormat - class _IntegerTimeFormat(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[TimeDomain.IntegerTimeFormatValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - FORMAT_UNKNOWN = typing___cast(TimeDomain.IntegerTimeFormatValue, 0) - UNIX_DAYS = typing___cast(TimeDomain.IntegerTimeFormatValue, 5) - UNIX_SECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 1) - UNIX_MILLISECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 2) - UNIX_MICROSECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 3) - UNIX_NANOSECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 4) - FORMAT_UNKNOWN = typing___cast(TimeDomain.IntegerTimeFormatValue, 0) - UNIX_DAYS = typing___cast(TimeDomain.IntegerTimeFormatValue, 5) - UNIX_SECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 1) - UNIX_MILLISECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 2) - UNIX_MICROSECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 3) - UNIX_NANOSECONDS = typing___cast(TimeDomain.IntegerTimeFormatValue, 4) - - string_format: typing___Text = ... - integer_format: type___TimeDomain.IntegerTimeFormatValue = ... 
- - def __init__(self, - *, - string_format : typing___Optional[typing___Text] = None, - integer_format : typing___Optional[type___TimeDomain.IntegerTimeFormatValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"format",b"format",u"integer_format",b"integer_format",u"string_format",b"string_format"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"format",b"format",u"integer_format",b"integer_format",u"string_format",b"string_format"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"format",b"format"]) -> typing_extensions___Literal["string_format","integer_format"]: ... -type___TimeDomain = TimeDomain - -class TimeOfDayDomain(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - IntegerTimeOfDayFormatValue = typing___NewType('IntegerTimeOfDayFormatValue', builtin___int) - type___IntegerTimeOfDayFormatValue = IntegerTimeOfDayFormatValue - IntegerTimeOfDayFormat: _IntegerTimeOfDayFormat - class _IntegerTimeOfDayFormat(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[TimeOfDayDomain.IntegerTimeOfDayFormatValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - FORMAT_UNKNOWN = typing___cast(TimeOfDayDomain.IntegerTimeOfDayFormatValue, 0) - PACKED_64_NANOS = typing___cast(TimeOfDayDomain.IntegerTimeOfDayFormatValue, 1) - FORMAT_UNKNOWN = typing___cast(TimeOfDayDomain.IntegerTimeOfDayFormatValue, 0) - PACKED_64_NANOS = typing___cast(TimeOfDayDomain.IntegerTimeOfDayFormatValue, 1) - - string_format: typing___Text = ... - integer_format: type___TimeOfDayDomain.IntegerTimeOfDayFormatValue = ... - - def __init__(self, - *, - string_format : typing___Optional[typing___Text] = None, - integer_format : typing___Optional[type___TimeOfDayDomain.IntegerTimeOfDayFormatValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"format",b"format",u"integer_format",b"integer_format",u"string_format",b"string_format"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"format",b"format",u"integer_format",b"integer_format",u"string_format",b"string_format"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"format",b"format"]) -> typing_extensions___Literal["string_format","integer_format"]: ... -type___TimeOfDayDomain = TimeOfDayDomain - -class FeaturePresence(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - min_fraction: builtin___float = ... - min_count: builtin___int = ... - - def __init__(self, - *, - min_fraction : typing___Optional[builtin___float] = None, - min_count : typing___Optional[builtin___int] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"min_count",b"min_count",u"min_fraction",b"min_fraction"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"min_count",b"min_count",u"min_fraction",b"min_fraction"]) -> None: ... -type___FeaturePresence = FeaturePresence - -class FeaturePresenceWithinGroup(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - required: builtin___bool = ... - - def __init__(self, - *, - required : typing___Optional[builtin___bool] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"required",b"required"]) -> builtin___bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"required",b"required"]) -> None: ... -type___FeaturePresenceWithinGroup = FeaturePresenceWithinGroup - -class InfinityNorm(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - threshold: builtin___float = ... - - def __init__(self, - *, - threshold : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"threshold",b"threshold"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"threshold",b"threshold"]) -> None: ... -type___InfinityNorm = InfinityNorm - -class FeatureComparator(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def infinity_norm(self) -> type___InfinityNorm: ... - - def __init__(self, - *, - infinity_norm : typing___Optional[type___InfinityNorm] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"infinity_norm",b"infinity_norm"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"infinity_norm",b"infinity_norm"]) -> None: ... -type___FeatureComparator = FeatureComparator - -class TensorRepresentation(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class DefaultValue(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - float_value: builtin___float = ... - int_value: builtin___int = ... - bytes_value: builtin___bytes = ... - uint_value: builtin___int = ... - - def __init__(self, - *, - float_value : typing___Optional[builtin___float] = None, - int_value : typing___Optional[builtin___int] = None, - bytes_value : typing___Optional[builtin___bytes] = None, - uint_value : typing___Optional[builtin___int] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"bytes_value",b"bytes_value",u"float_value",b"float_value",u"int_value",b"int_value",u"kind",b"kind",u"uint_value",b"uint_value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"bytes_value",b"bytes_value",u"float_value",b"float_value",u"int_value",b"int_value",u"kind",b"kind",u"uint_value",b"uint_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"kind",b"kind"]) -> typing_extensions___Literal["float_value","int_value","bytes_value","uint_value"]: ... - type___DefaultValue = DefaultValue - - class DenseTensor(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - column_name: typing___Text = ... - - @property - def shape(self) -> type___FixedShape: ... - - @property - def default_value(self) -> type___TensorRepresentation.DefaultValue: ... - - def __init__(self, - *, - column_name : typing___Optional[typing___Text] = None, - shape : typing___Optional[type___FixedShape] = None, - default_value : typing___Optional[type___TensorRepresentation.DefaultValue] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"column_name",b"column_name",u"default_value",b"default_value",u"shape",b"shape"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"column_name",b"column_name",u"default_value",b"default_value",u"shape",b"shape"]) -> None: ... 
- type___DenseTensor = DenseTensor - - class VarLenSparseTensor(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - column_name: typing___Text = ... - - def __init__(self, - *, - column_name : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"column_name",b"column_name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"column_name",b"column_name"]) -> None: ... - type___VarLenSparseTensor = VarLenSparseTensor - - class SparseTensor(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - index_column_names: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - value_column_name: typing___Text = ... - - @property - def dense_shape(self) -> type___FixedShape: ... - - def __init__(self, - *, - dense_shape : typing___Optional[type___FixedShape] = None, - index_column_names : typing___Optional[typing___Iterable[typing___Text]] = None, - value_column_name : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"dense_shape",b"dense_shape",u"value_column_name",b"value_column_name"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"dense_shape",b"dense_shape",u"index_column_names",b"index_column_names",u"value_column_name",b"value_column_name"]) -> None: ... - type___SparseTensor = SparseTensor - - - @property - def dense_tensor(self) -> type___TensorRepresentation.DenseTensor: ... - - @property - def varlen_sparse_tensor(self) -> type___TensorRepresentation.VarLenSparseTensor: ... - - @property - def sparse_tensor(self) -> type___TensorRepresentation.SparseTensor: ... - - def __init__(self, - *, - dense_tensor : typing___Optional[type___TensorRepresentation.DenseTensor] = None, - varlen_sparse_tensor : typing___Optional[type___TensorRepresentation.VarLenSparseTensor] = None, - sparse_tensor : typing___Optional[type___TensorRepresentation.SparseTensor] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"dense_tensor",b"dense_tensor",u"kind",b"kind",u"sparse_tensor",b"sparse_tensor",u"varlen_sparse_tensor",b"varlen_sparse_tensor"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"dense_tensor",b"dense_tensor",u"kind",b"kind",u"sparse_tensor",b"sparse_tensor",u"varlen_sparse_tensor",b"varlen_sparse_tensor"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"kind",b"kind"]) -> typing_extensions___Literal["dense_tensor","varlen_sparse_tensor","sparse_tensor"]: ... -type___TensorRepresentation = TensorRepresentation - -class TensorRepresentationGroup(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class TensorRepresentationEntry(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - key: typing___Text = ... - - @property - def value(self) -> type___TensorRepresentation: ... - - def __init__(self, - *, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[type___TensorRepresentation] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> builtin___bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... - type___TensorRepresentationEntry = TensorRepresentationEntry - - - @property - def tensor_representation(self) -> google___protobuf___internal___containers___MessageMap[typing___Text, type___TensorRepresentation]: ... - - def __init__(self, - *, - tensor_representation : typing___Optional[typing___Mapping[typing___Text, type___TensorRepresentation]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"tensor_representation",b"tensor_representation"]) -> None: ... -type___TensorRepresentationGroup = TensorRepresentationGroup diff --git a/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.py b/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.py deleted file mode 100644 index 21473adc75..0000000000 --- a/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.py +++ /dev/null @@ -1,1754 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: tensorflow_metadata/proto/v0/statistics.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from tensorflow_metadata.proto.v0 import path_pb2 as tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='tensorflow_metadata/proto/v0/statistics.proto', - package='tensorflow.metadata.v0', - syntax='proto3', - serialized_options=b'\n\032org.tensorflow.metadata.v0P\001ZEgithub.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0\370\001\001', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n-tensorflow_metadata/proto/v0/statistics.proto\x12\x16tensorflow.metadata.v0\x1a\'tensorflow_metadata/proto/v0/path.proto\"b\n\x1c\x44\x61tasetFeatureStatisticsList\x12\x42\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x30.tensorflow.metadata.v0.DatasetFeatureStatistics\"\xe6\x01\n\x18\x44\x61tasetFeatureStatistics\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0cnum_examples\x18\x02 \x01(\x04\x12\x1d\n\x15weighted_num_examples\x18\x04 \x01(\x01\x12?\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32-.tensorflow.metadata.v0.FeatureNameStatistics\x12\x46\n\x0e\x63ross_features\x18\x05 \x03(\x0b\x32..tensorflow.metadata.v0.CrossFeatureStatistics\"\xb4\x02\n\x16\x43rossFeatureStatistics\x12,\n\x06path_x\x18\x01 \x01(\x0b\x32\x1c.tensorflow.metadata.v0.Path\x12,\n\x06path_y\x18\x02 \x01(\x0b\x32\x1c.tensorflow.metadata.v0.Path\x12\r\n\x05\x63ount\x18\x03 \x01(\x04\x12I\n\x0fnum_cross_stats\x18\x04 \x01(\x0b\x32..tensorflow.metadata.v0.NumericCrossStatisticsH\x00\x12U\n\x17\x63\x61tegorical_cross_stats\x18\x05 \x01(\x0b\x32\x32.tensorflow.metadata.v0.CategoricalCrossStatisticsH\x00\x42\r\n\x0b\x63ross_stats\"A\n\x16NumericCrossStatistics\x12\x13\n\x0b\x63orrelation\x18\x01 \x01(\x02\x12\x12\n\ncovariance\x18\x02 \x01(\x02\"R\n\x1a\x43\x61tegoricalCrossStatistics\x12\x34\n\x04lift\x18\x01 \x01(\x0b\x32&.tensorflow.metadata.v0.LiftStatistics\"\x8b\x01\n\x0eLiftStatistics\x12\x37\n\x0blift_series\x18\x01 \x03(\x0b\x32\".tensorflow.metadata.v0.LiftSeries\x12@\n\x14weighted_lift_series\x18\x02 \x03(\x0b\x32\".tensorflow.metadata.v0.LiftSeries\"\x8d\x04\n\nLiftSeries\x12\x0f\n\x05y_int\x18\x01 
\x01(\x05H\x00\x12\x12\n\x08y_string\x18\x02 \x01(\tH\x00\x12=\n\x08y_bucket\x18\x03 \x01(\x0b\x32).tensorflow.metadata.v0.LiftSeries.BucketH\x00\x12\x11\n\x07y_count\x18\x04 \x01(\x04H\x01\x12\x1a\n\x10weighted_y_count\x18\x05 \x01(\x01H\x01\x12\x41\n\x0blift_values\x18\x06 \x03(\x0b\x32,.tensorflow.metadata.v0.LiftSeries.LiftValue\x1a/\n\x06\x42ucket\x12\x11\n\tlow_value\x18\x01 \x01(\x01\x12\x12\n\nhigh_value\x18\x02 \x01(\x01\x1a\xdb\x01\n\tLiftValue\x12\x0f\n\x05x_int\x18\x01 \x01(\x05H\x00\x12\x12\n\x08x_string\x18\x02 \x01(\tH\x00\x12\x0c\n\x04lift\x18\x03 \x01(\x01\x12\x11\n\x07x_count\x18\x04 \x01(\x04H\x01\x12\x1a\n\x10weighted_x_count\x18\x05 \x01(\x01H\x01\x12\x17\n\rx_and_y_count\x18\x06 \x01(\x04H\x02\x12 \n\x16weighted_x_and_y_count\x18\x07 \x01(\x01H\x02\x42\t\n\x07x_valueB\x0f\n\rx_count_valueB\x15\n\x13x_and_y_count_valueB\t\n\x07y_valueB\x0f\n\ry_count_value\"\xae\x04\n\x15\x46\x65\x61tureNameStatistics\x12\x0e\n\x04name\x18\x01 \x01(\tH\x00\x12,\n\x04path\x18\x08 \x01(\x0b\x32\x1c.tensorflow.metadata.v0.PathH\x00\x12@\n\x04type\x18\x02 \x01(\x0e\x32\x32.tensorflow.metadata.v0.FeatureNameStatistics.Type\x12>\n\tnum_stats\x18\x03 \x01(\x0b\x32).tensorflow.metadata.v0.NumericStatisticsH\x01\x12@\n\x0cstring_stats\x18\x04 \x01(\x0b\x32(.tensorflow.metadata.v0.StringStatisticsH\x01\x12>\n\x0b\x62ytes_stats\x18\x05 \x01(\x0b\x32\'.tensorflow.metadata.v0.BytesStatisticsH\x01\x12@\n\x0cstruct_stats\x18\x07 \x01(\x0b\x32(.tensorflow.metadata.v0.StructStatisticsH\x01\x12=\n\x0c\x63ustom_stats\x18\x06 \x03(\x0b\x32\'.tensorflow.metadata.v0.CustomStatistic\"=\n\x04Type\x12\x07\n\x03INT\x10\x00\x12\t\n\x05\x46LOAT\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05\x42YTES\x10\x03\x12\n\n\x06STRUCT\x10\x04\x42\n\n\x08\x66ield_idB\x07\n\x05stats\"x\n\x18WeightedCommonStatistics\x12\x17\n\x0fnum_non_missing\x18\x01 \x01(\x01\x12\x13\n\x0bnum_missing\x18\x02 \x01(\x01\x12\x16\n\x0e\x61vg_num_values\x18\x03 \x01(\x01\x12\x16\n\x0etot_num_values\x18\x04 \x01(\x01\"\xbd\x01\n\x0f\x43ustomStatistic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x03num\x18\x02 \x01(\x01H\x00\x12\r\n\x03str\x18\x03 \x01(\tH\x00\x12\x36\n\thistogram\x18\x04 \x01(\x0b\x32!.tensorflow.metadata.v0.HistogramH\x00\x12?\n\x0erank_histogram\x18\x05 \x01(\x0b\x32%.tensorflow.metadata.v0.RankHistogramH\x00\x42\x05\n\x03val\"\xb9\x02\n\x11NumericStatistics\x12>\n\x0c\x63ommon_stats\x18\x01 \x01(\x0b\x32(.tensorflow.metadata.v0.CommonStatistics\x12\x0c\n\x04mean\x18\x02 \x01(\x01\x12\x0f\n\x07std_dev\x18\x03 \x01(\x01\x12\x11\n\tnum_zeros\x18\x04 \x01(\x04\x12\x0b\n\x03min\x18\x05 \x01(\x01\x12\x0e\n\x06median\x18\x06 \x01(\x01\x12\x0b\n\x03max\x18\x07 \x01(\x01\x12\x35\n\nhistograms\x18\x08 \x03(\x0b\x32!.tensorflow.metadata.v0.Histogram\x12Q\n\x16weighted_numeric_stats\x18\t \x01(\x0b\x32\x31.tensorflow.metadata.v0.WeightedNumericStatistics\"\xa2\x03\n\x10StringStatistics\x12>\n\x0c\x63ommon_stats\x18\x01 \x01(\x0b\x32(.tensorflow.metadata.v0.CommonStatistics\x12\x0e\n\x06unique\x18\x02 \x01(\x04\x12I\n\ntop_values\x18\x03 \x03(\x0b\x32\x35.tensorflow.metadata.v0.StringStatistics.FreqAndValue\x12\x12\n\navg_length\x18\x04 \x01(\x02\x12=\n\x0erank_histogram\x18\x05 \x01(\x0b\x32%.tensorflow.metadata.v0.RankHistogram\x12O\n\x15weighted_string_stats\x18\x06 \x01(\x0b\x32\x30.tensorflow.metadata.v0.WeightedStringStatistics\x12\x17\n\x0fvocabulary_file\x18\x07 \x01(\t\x1a\x36\n\x0c\x46reqAndValue\x12\r\n\x05value\x18\x02 \x01(\t\x12\x11\n\tfrequency\x18\x03 
\x01(\x01J\x04\x08\x01\x10\x02\"\x81\x01\n\x19WeightedNumericStatistics\x12\x0c\n\x04mean\x18\x01 \x01(\x01\x12\x0f\n\x07std_dev\x18\x02 \x01(\x01\x12\x0e\n\x06median\x18\x03 \x01(\x01\x12\x35\n\nhistograms\x18\x04 \x03(\x0b\x32!.tensorflow.metadata.v0.Histogram\"\xa4\x01\n\x18WeightedStringStatistics\x12I\n\ntop_values\x18\x01 \x03(\x0b\x32\x35.tensorflow.metadata.v0.StringStatistics.FreqAndValue\x12=\n\x0erank_histogram\x18\x02 \x01(\x0b\x32%.tensorflow.metadata.v0.RankHistogram\"\xa6\x01\n\x0f\x42ytesStatistics\x12>\n\x0c\x63ommon_stats\x18\x01 \x01(\x0b\x32(.tensorflow.metadata.v0.CommonStatistics\x12\x0e\n\x06unique\x18\x02 \x01(\x04\x12\x15\n\ravg_num_bytes\x18\x03 \x01(\x02\x12\x15\n\rmin_num_bytes\x18\x04 \x01(\x02\x12\x15\n\rmax_num_bytes\x18\x05 \x01(\x02\"R\n\x10StructStatistics\x12>\n\x0c\x63ommon_stats\x18\x01 \x01(\x0b\x32(.tensorflow.metadata.v0.CommonStatistics\"\xfc\x02\n\x10\x43ommonStatistics\x12\x17\n\x0fnum_non_missing\x18\x01 \x01(\x04\x12\x13\n\x0bnum_missing\x18\x02 \x01(\x04\x12\x16\n\x0emin_num_values\x18\x03 \x01(\x04\x12\x16\n\x0emax_num_values\x18\x04 \x01(\x04\x12\x16\n\x0e\x61vg_num_values\x18\x05 \x01(\x02\x12\x16\n\x0etot_num_values\x18\x08 \x01(\x04\x12?\n\x14num_values_histogram\x18\x06 \x01(\x0b\x32!.tensorflow.metadata.v0.Histogram\x12O\n\x15weighted_common_stats\x18\x07 \x01(\x0b\x32\x30.tensorflow.metadata.v0.WeightedCommonStatistics\x12H\n\x1d\x66\x65\x61ture_list_length_histogram\x18\t \x01(\x0b\x32!.tensorflow.metadata.v0.Histogram\"\xb6\x02\n\tHistogram\x12\x0f\n\x07num_nan\x18\x01 \x01(\x04\x12\x15\n\rnum_undefined\x18\x02 \x01(\x04\x12\x39\n\x07\x62uckets\x18\x03 \x03(\x0b\x32(.tensorflow.metadata.v0.Histogram.Bucket\x12=\n\x04type\x18\x04 \x01(\x0e\x32/.tensorflow.metadata.v0.Histogram.HistogramType\x12\x0c\n\x04name\x18\x05 \x01(\t\x1aK\n\x06\x42ucket\x12\x11\n\tlow_value\x18\x01 \x01(\x01\x12\x12\n\nhigh_value\x18\x02 \x01(\x01\x12\x14\n\x0csample_count\x18\x04 \x01(\x01J\x04\x08\x03\x10\x04\",\n\rHistogramType\x12\x0c\n\x08STANDARD\x10\x00\x12\r\n\tQUANTILES\x10\x01\"\xb6\x01\n\rRankHistogram\x12=\n\x07\x62uckets\x18\x01 \x03(\x0b\x32,.tensorflow.metadata.v0.RankHistogram.Bucket\x12\x0c\n\x04name\x18\x02 \x01(\t\x1aX\n\x06\x42ucket\x12\x10\n\x08low_rank\x18\x01 \x01(\x04\x12\x11\n\thigh_rank\x18\x02 \x01(\x04\x12\r\n\x05label\x18\x04 \x01(\t\x12\x14\n\x0csample_count\x18\x05 \x01(\x01J\x04\x08\x03\x10\x04\x42h\n\x1aorg.tensorflow.metadata.v0P\x01ZEgithub.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0\xf8\x01\x01\x62\x06proto3' - , - dependencies=[tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2.DESCRIPTOR,]) - - - -_FEATURENAMESTATISTICS_TYPE = _descriptor.EnumDescriptor( - name='Type', - full_name='tensorflow.metadata.v0.FeatureNameStatistics.Type', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='INT', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='FLOAT', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STRING', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='BYTES', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STRUCT', index=4, 
number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2056, - serialized_end=2117, -) -_sym_db.RegisterEnumDescriptor(_FEATURENAMESTATISTICS_TYPE) - -_HISTOGRAM_HISTOGRAMTYPE = _descriptor.EnumDescriptor( - name='HistogramType', - full_name='tensorflow.metadata.v0.Histogram.HistogramType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='STANDARD', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='QUANTILES', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=4393, - serialized_end=4437, -) -_sym_db.RegisterEnumDescriptor(_HISTOGRAM_HISTOGRAMTYPE) - - -_DATASETFEATURESTATISTICSLIST = _descriptor.Descriptor( - name='DatasetFeatureStatisticsList', - full_name='tensorflow.metadata.v0.DatasetFeatureStatisticsList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='datasets', full_name='tensorflow.metadata.v0.DatasetFeatureStatisticsList.datasets', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=114, - serialized_end=212, -) - - -_DATASETFEATURESTATISTICS = _descriptor.Descriptor( - name='DatasetFeatureStatistics', - full_name='tensorflow.metadata.v0.DatasetFeatureStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='tensorflow.metadata.v0.DatasetFeatureStatistics.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_examples', full_name='tensorflow.metadata.v0.DatasetFeatureStatistics.num_examples', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_num_examples', full_name='tensorflow.metadata.v0.DatasetFeatureStatistics.weighted_num_examples', index=2, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='features', 
full_name='tensorflow.metadata.v0.DatasetFeatureStatistics.features', index=3, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='cross_features', full_name='tensorflow.metadata.v0.DatasetFeatureStatistics.cross_features', index=4, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=215, - serialized_end=445, -) - - -_CROSSFEATURESTATISTICS = _descriptor.Descriptor( - name='CrossFeatureStatistics', - full_name='tensorflow.metadata.v0.CrossFeatureStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='path_x', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.path_x', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='path_y', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.path_y', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='count', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.count', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_cross_stats', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.num_cross_stats', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='categorical_cross_stats', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.categorical_cross_stats', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - 
name='cross_stats', full_name='tensorflow.metadata.v0.CrossFeatureStatistics.cross_stats', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=448, - serialized_end=756, -) - - -_NUMERICCROSSSTATISTICS = _descriptor.Descriptor( - name='NumericCrossStatistics', - full_name='tensorflow.metadata.v0.NumericCrossStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='correlation', full_name='tensorflow.metadata.v0.NumericCrossStatistics.correlation', index=0, - number=1, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='covariance', full_name='tensorflow.metadata.v0.NumericCrossStatistics.covariance', index=1, - number=2, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=758, - serialized_end=823, -) - - -_CATEGORICALCROSSSTATISTICS = _descriptor.Descriptor( - name='CategoricalCrossStatistics', - full_name='tensorflow.metadata.v0.CategoricalCrossStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='lift', full_name='tensorflow.metadata.v0.CategoricalCrossStatistics.lift', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=825, - serialized_end=907, -) - - -_LIFTSTATISTICS = _descriptor.Descriptor( - name='LiftStatistics', - full_name='tensorflow.metadata.v0.LiftStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='lift_series', full_name='tensorflow.metadata.v0.LiftStatistics.lift_series', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_lift_series', full_name='tensorflow.metadata.v0.LiftStatistics.weighted_lift_series', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - 
extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=910, - serialized_end=1049, -) - - -_LIFTSERIES_BUCKET = _descriptor.Descriptor( - name='Bucket', - full_name='tensorflow.metadata.v0.LiftSeries.Bucket', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='low_value', full_name='tensorflow.metadata.v0.LiftSeries.Bucket.low_value', index=0, - number=1, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='high_value', full_name='tensorflow.metadata.v0.LiftSeries.Bucket.high_value', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1280, - serialized_end=1327, -) - -_LIFTSERIES_LIFTVALUE = _descriptor.Descriptor( - name='LiftValue', - full_name='tensorflow.metadata.v0.LiftSeries.LiftValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='x_int', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_int', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='x_string', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_string', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='lift', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.lift', index=2, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='x_count', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_count', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_x_count', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.weighted_x_count', index=4, - number=5, type=1, cpp_type=5, label=1, - 
has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='x_and_y_count', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_and_y_count', index=5, - number=6, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_x_and_y_count', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.weighted_x_and_y_count', index=6, - number=7, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='x_value', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_value', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - _descriptor.OneofDescriptor( - name='x_count_value', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_count_value', - index=1, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - _descriptor.OneofDescriptor( - name='x_and_y_count_value', full_name='tensorflow.metadata.v0.LiftSeries.LiftValue.x_and_y_count_value', - index=2, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=1330, - serialized_end=1549, -) - -_LIFTSERIES = _descriptor.Descriptor( - name='LiftSeries', - full_name='tensorflow.metadata.v0.LiftSeries', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='y_int', full_name='tensorflow.metadata.v0.LiftSeries.y_int', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='y_string', full_name='tensorflow.metadata.v0.LiftSeries.y_string', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='y_bucket', full_name='tensorflow.metadata.v0.LiftSeries.y_bucket', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='y_count', full_name='tensorflow.metadata.v0.LiftSeries.y_count', index=3, - number=4, type=4, cpp_type=4, label=1, 
- has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_y_count', full_name='tensorflow.metadata.v0.LiftSeries.weighted_y_count', index=4, - number=5, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='lift_values', full_name='tensorflow.metadata.v0.LiftSeries.lift_values', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_LIFTSERIES_BUCKET, _LIFTSERIES_LIFTVALUE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='y_value', full_name='tensorflow.metadata.v0.LiftSeries.y_value', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - _descriptor.OneofDescriptor( - name='y_count_value', full_name='tensorflow.metadata.v0.LiftSeries.y_count_value', - index=1, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=1052, - serialized_end=1577, -) - - -_FEATURENAMESTATISTICS = _descriptor.Descriptor( - name='FeatureNameStatistics', - full_name='tensorflow.metadata.v0.FeatureNameStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='tensorflow.metadata.v0.FeatureNameStatistics.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='path', full_name='tensorflow.metadata.v0.FeatureNameStatistics.path', index=1, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='tensorflow.metadata.v0.FeatureNameStatistics.type', index=2, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.num_stats', index=3, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='string_stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.string_stats', index=4, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='bytes_stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.bytes_stats', index=5, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='struct_stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.struct_stats', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='custom_stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.custom_stats', index=7, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FEATURENAMESTATISTICS_TYPE, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='field_id', full_name='tensorflow.metadata.v0.FeatureNameStatistics.field_id', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - _descriptor.OneofDescriptor( - name='stats', full_name='tensorflow.metadata.v0.FeatureNameStatistics.stats', - index=1, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=1580, - serialized_end=2138, -) - - -_WEIGHTEDCOMMONSTATISTICS = _descriptor.Descriptor( - name='WeightedCommonStatistics', - full_name='tensorflow.metadata.v0.WeightedCommonStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='num_non_missing', full_name='tensorflow.metadata.v0.WeightedCommonStatistics.num_non_missing', index=0, - number=1, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_missing', full_name='tensorflow.metadata.v0.WeightedCommonStatistics.num_missing', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - 
name='avg_num_values', full_name='tensorflow.metadata.v0.WeightedCommonStatistics.avg_num_values', index=2, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='tot_num_values', full_name='tensorflow.metadata.v0.WeightedCommonStatistics.tot_num_values', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2140, - serialized_end=2260, -) - - -_CUSTOMSTATISTIC = _descriptor.Descriptor( - name='CustomStatistic', - full_name='tensorflow.metadata.v0.CustomStatistic', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='tensorflow.metadata.v0.CustomStatistic.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num', full_name='tensorflow.metadata.v0.CustomStatistic.num', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='str', full_name='tensorflow.metadata.v0.CustomStatistic.str', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='histogram', full_name='tensorflow.metadata.v0.CustomStatistic.histogram', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='rank_histogram', full_name='tensorflow.metadata.v0.CustomStatistic.rank_histogram', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='val', 
full_name='tensorflow.metadata.v0.CustomStatistic.val', - index=0, containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[]), - ], - serialized_start=2263, - serialized_end=2452, -) - - -_NUMERICSTATISTICS = _descriptor.Descriptor( - name='NumericStatistics', - full_name='tensorflow.metadata.v0.NumericStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='common_stats', full_name='tensorflow.metadata.v0.NumericStatistics.common_stats', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='mean', full_name='tensorflow.metadata.v0.NumericStatistics.mean', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='std_dev', full_name='tensorflow.metadata.v0.NumericStatistics.std_dev', index=2, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_zeros', full_name='tensorflow.metadata.v0.NumericStatistics.num_zeros', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='min', full_name='tensorflow.metadata.v0.NumericStatistics.min', index=4, - number=5, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='median', full_name='tensorflow.metadata.v0.NumericStatistics.median', index=5, - number=6, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='max', full_name='tensorflow.metadata.v0.NumericStatistics.max', index=6, - number=7, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='histograms', full_name='tensorflow.metadata.v0.NumericStatistics.histograms', index=7, - number=8, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_numeric_stats', full_name='tensorflow.metadata.v0.NumericStatistics.weighted_numeric_stats', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2455, - serialized_end=2768, -) - - -_STRINGSTATISTICS_FREQANDVALUE = _descriptor.Descriptor( - name='FreqAndValue', - full_name='tensorflow.metadata.v0.StringStatistics.FreqAndValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='tensorflow.metadata.v0.StringStatistics.FreqAndValue.value', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='frequency', full_name='tensorflow.metadata.v0.StringStatistics.FreqAndValue.frequency', index=1, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3135, - serialized_end=3189, -) - -_STRINGSTATISTICS = _descriptor.Descriptor( - name='StringStatistics', - full_name='tensorflow.metadata.v0.StringStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='common_stats', full_name='tensorflow.metadata.v0.StringStatistics.common_stats', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='unique', full_name='tensorflow.metadata.v0.StringStatistics.unique', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='top_values', full_name='tensorflow.metadata.v0.StringStatistics.top_values', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='avg_length', full_name='tensorflow.metadata.v0.StringStatistics.avg_length', index=3, - number=4, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='rank_histogram', full_name='tensorflow.metadata.v0.StringStatistics.rank_histogram', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_string_stats', full_name='tensorflow.metadata.v0.StringStatistics.weighted_string_stats', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='vocabulary_file', full_name='tensorflow.metadata.v0.StringStatistics.vocabulary_file', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_STRINGSTATISTICS_FREQANDVALUE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2771, - serialized_end=3189, -) - - -_WEIGHTEDNUMERICSTATISTICS = _descriptor.Descriptor( - name='WeightedNumericStatistics', - full_name='tensorflow.metadata.v0.WeightedNumericStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='mean', full_name='tensorflow.metadata.v0.WeightedNumericStatistics.mean', index=0, - number=1, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='std_dev', full_name='tensorflow.metadata.v0.WeightedNumericStatistics.std_dev', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='median', full_name='tensorflow.metadata.v0.WeightedNumericStatistics.median', index=2, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='histograms', 
full_name='tensorflow.metadata.v0.WeightedNumericStatistics.histograms', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3192, - serialized_end=3321, -) - - -_WEIGHTEDSTRINGSTATISTICS = _descriptor.Descriptor( - name='WeightedStringStatistics', - full_name='tensorflow.metadata.v0.WeightedStringStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='top_values', full_name='tensorflow.metadata.v0.WeightedStringStatistics.top_values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='rank_histogram', full_name='tensorflow.metadata.v0.WeightedStringStatistics.rank_histogram', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3324, - serialized_end=3488, -) - - -_BYTESSTATISTICS = _descriptor.Descriptor( - name='BytesStatistics', - full_name='tensorflow.metadata.v0.BytesStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='common_stats', full_name='tensorflow.metadata.v0.BytesStatistics.common_stats', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='unique', full_name='tensorflow.metadata.v0.BytesStatistics.unique', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='avg_num_bytes', full_name='tensorflow.metadata.v0.BytesStatistics.avg_num_bytes', index=2, - number=3, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='min_num_bytes', full_name='tensorflow.metadata.v0.BytesStatistics.min_num_bytes', index=3, - number=4, type=2, cpp_type=6, 
label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='max_num_bytes', full_name='tensorflow.metadata.v0.BytesStatistics.max_num_bytes', index=4, - number=5, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3491, - serialized_end=3657, -) - - -_STRUCTSTATISTICS = _descriptor.Descriptor( - name='StructStatistics', - full_name='tensorflow.metadata.v0.StructStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='common_stats', full_name='tensorflow.metadata.v0.StructStatistics.common_stats', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3659, - serialized_end=3741, -) - - -_COMMONSTATISTICS = _descriptor.Descriptor( - name='CommonStatistics', - full_name='tensorflow.metadata.v0.CommonStatistics', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='num_non_missing', full_name='tensorflow.metadata.v0.CommonStatistics.num_non_missing', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_missing', full_name='tensorflow.metadata.v0.CommonStatistics.num_missing', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='min_num_values', full_name='tensorflow.metadata.v0.CommonStatistics.min_num_values', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='max_num_values', full_name='tensorflow.metadata.v0.CommonStatistics.max_num_values', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='avg_num_values', full_name='tensorflow.metadata.v0.CommonStatistics.avg_num_values', index=4, - number=5, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='tot_num_values', full_name='tensorflow.metadata.v0.CommonStatistics.tot_num_values', index=5, - number=8, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_values_histogram', full_name='tensorflow.metadata.v0.CommonStatistics.num_values_histogram', index=6, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weighted_common_stats', full_name='tensorflow.metadata.v0.CommonStatistics.weighted_common_stats', index=7, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='feature_list_length_histogram', full_name='tensorflow.metadata.v0.CommonStatistics.feature_list_length_histogram', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3744, - serialized_end=4124, -) - - -_HISTOGRAM_BUCKET = _descriptor.Descriptor( - name='Bucket', - full_name='tensorflow.metadata.v0.Histogram.Bucket', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='low_value', full_name='tensorflow.metadata.v0.Histogram.Bucket.low_value', index=0, - number=1, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='high_value', full_name='tensorflow.metadata.v0.Histogram.Bucket.high_value', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - 
_descriptor.FieldDescriptor( - name='sample_count', full_name='tensorflow.metadata.v0.Histogram.Bucket.sample_count', index=2, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4316, - serialized_end=4391, -) - -_HISTOGRAM = _descriptor.Descriptor( - name='Histogram', - full_name='tensorflow.metadata.v0.Histogram', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='num_nan', full_name='tensorflow.metadata.v0.Histogram.num_nan', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='num_undefined', full_name='tensorflow.metadata.v0.Histogram.num_undefined', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='buckets', full_name='tensorflow.metadata.v0.Histogram.buckets', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='tensorflow.metadata.v0.Histogram.type', index=3, - number=4, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='name', full_name='tensorflow.metadata.v0.Histogram.name', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_HISTOGRAM_BUCKET, ], - enum_types=[ - _HISTOGRAM_HISTOGRAMTYPE, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4127, - serialized_end=4437, -) - - -_RANKHISTOGRAM_BUCKET = _descriptor.Descriptor( - name='Bucket', - full_name='tensorflow.metadata.v0.RankHistogram.Bucket', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='low_rank', full_name='tensorflow.metadata.v0.RankHistogram.Bucket.low_rank', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, 
default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='high_rank', full_name='tensorflow.metadata.v0.RankHistogram.Bucket.high_rank', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='label', full_name='tensorflow.metadata.v0.RankHistogram.Bucket.label', index=2, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='sample_count', full_name='tensorflow.metadata.v0.RankHistogram.Bucket.sample_count', index=3, - number=5, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4534, - serialized_end=4622, -) - -_RANKHISTOGRAM = _descriptor.Descriptor( - name='RankHistogram', - full_name='tensorflow.metadata.v0.RankHistogram', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='buckets', full_name='tensorflow.metadata.v0.RankHistogram.buckets', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='name', full_name='tensorflow.metadata.v0.RankHistogram.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_RANKHISTOGRAM_BUCKET, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4440, - serialized_end=4622, -) - -_DATASETFEATURESTATISTICSLIST.fields_by_name['datasets'].message_type = _DATASETFEATURESTATISTICS -_DATASETFEATURESTATISTICS.fields_by_name['features'].message_type = _FEATURENAMESTATISTICS -_DATASETFEATURESTATISTICS.fields_by_name['cross_features'].message_type = _CROSSFEATURESTATISTICS -_CROSSFEATURESTATISTICS.fields_by_name['path_x'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2._PATH -_CROSSFEATURESTATISTICS.fields_by_name['path_y'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2._PATH 
-_CROSSFEATURESTATISTICS.fields_by_name['num_cross_stats'].message_type = _NUMERICCROSSSTATISTICS -_CROSSFEATURESTATISTICS.fields_by_name['categorical_cross_stats'].message_type = _CATEGORICALCROSSSTATISTICS -_CROSSFEATURESTATISTICS.oneofs_by_name['cross_stats'].fields.append( - _CROSSFEATURESTATISTICS.fields_by_name['num_cross_stats']) -_CROSSFEATURESTATISTICS.fields_by_name['num_cross_stats'].containing_oneof = _CROSSFEATURESTATISTICS.oneofs_by_name['cross_stats'] -_CROSSFEATURESTATISTICS.oneofs_by_name['cross_stats'].fields.append( - _CROSSFEATURESTATISTICS.fields_by_name['categorical_cross_stats']) -_CROSSFEATURESTATISTICS.fields_by_name['categorical_cross_stats'].containing_oneof = _CROSSFEATURESTATISTICS.oneofs_by_name['cross_stats'] -_CATEGORICALCROSSSTATISTICS.fields_by_name['lift'].message_type = _LIFTSTATISTICS -_LIFTSTATISTICS.fields_by_name['lift_series'].message_type = _LIFTSERIES -_LIFTSTATISTICS.fields_by_name['weighted_lift_series'].message_type = _LIFTSERIES -_LIFTSERIES_BUCKET.containing_type = _LIFTSERIES -_LIFTSERIES_LIFTVALUE.containing_type = _LIFTSERIES -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['x_int']) -_LIFTSERIES_LIFTVALUE.fields_by_name['x_int'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_value'] -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['x_string']) -_LIFTSERIES_LIFTVALUE.fields_by_name['x_string'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_value'] -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_count_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['x_count']) -_LIFTSERIES_LIFTVALUE.fields_by_name['x_count'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_count_value'] -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_count_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['weighted_x_count']) -_LIFTSERIES_LIFTVALUE.fields_by_name['weighted_x_count'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_count_value'] -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_and_y_count_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['x_and_y_count']) -_LIFTSERIES_LIFTVALUE.fields_by_name['x_and_y_count'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_and_y_count_value'] -_LIFTSERIES_LIFTVALUE.oneofs_by_name['x_and_y_count_value'].fields.append( - _LIFTSERIES_LIFTVALUE.fields_by_name['weighted_x_and_y_count']) -_LIFTSERIES_LIFTVALUE.fields_by_name['weighted_x_and_y_count'].containing_oneof = _LIFTSERIES_LIFTVALUE.oneofs_by_name['x_and_y_count_value'] -_LIFTSERIES.fields_by_name['y_bucket'].message_type = _LIFTSERIES_BUCKET -_LIFTSERIES.fields_by_name['lift_values'].message_type = _LIFTSERIES_LIFTVALUE -_LIFTSERIES.oneofs_by_name['y_value'].fields.append( - _LIFTSERIES.fields_by_name['y_int']) -_LIFTSERIES.fields_by_name['y_int'].containing_oneof = _LIFTSERIES.oneofs_by_name['y_value'] -_LIFTSERIES.oneofs_by_name['y_value'].fields.append( - _LIFTSERIES.fields_by_name['y_string']) -_LIFTSERIES.fields_by_name['y_string'].containing_oneof = _LIFTSERIES.oneofs_by_name['y_value'] -_LIFTSERIES.oneofs_by_name['y_value'].fields.append( - _LIFTSERIES.fields_by_name['y_bucket']) -_LIFTSERIES.fields_by_name['y_bucket'].containing_oneof = _LIFTSERIES.oneofs_by_name['y_value'] -_LIFTSERIES.oneofs_by_name['y_count_value'].fields.append( - _LIFTSERIES.fields_by_name['y_count']) -_LIFTSERIES.fields_by_name['y_count'].containing_oneof = _LIFTSERIES.oneofs_by_name['y_count_value'] 
-_LIFTSERIES.oneofs_by_name['y_count_value'].fields.append( - _LIFTSERIES.fields_by_name['weighted_y_count']) -_LIFTSERIES.fields_by_name['weighted_y_count'].containing_oneof = _LIFTSERIES.oneofs_by_name['y_count_value'] -_FEATURENAMESTATISTICS.fields_by_name['path'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_path__pb2._PATH -_FEATURENAMESTATISTICS.fields_by_name['type'].enum_type = _FEATURENAMESTATISTICS_TYPE -_FEATURENAMESTATISTICS.fields_by_name['num_stats'].message_type = _NUMERICSTATISTICS -_FEATURENAMESTATISTICS.fields_by_name['string_stats'].message_type = _STRINGSTATISTICS -_FEATURENAMESTATISTICS.fields_by_name['bytes_stats'].message_type = _BYTESSTATISTICS -_FEATURENAMESTATISTICS.fields_by_name['struct_stats'].message_type = _STRUCTSTATISTICS -_FEATURENAMESTATISTICS.fields_by_name['custom_stats'].message_type = _CUSTOMSTATISTIC -_FEATURENAMESTATISTICS_TYPE.containing_type = _FEATURENAMESTATISTICS -_FEATURENAMESTATISTICS.oneofs_by_name['field_id'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['name']) -_FEATURENAMESTATISTICS.fields_by_name['name'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['field_id'] -_FEATURENAMESTATISTICS.oneofs_by_name['field_id'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['path']) -_FEATURENAMESTATISTICS.fields_by_name['path'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['field_id'] -_FEATURENAMESTATISTICS.oneofs_by_name['stats'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['num_stats']) -_FEATURENAMESTATISTICS.fields_by_name['num_stats'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['stats'] -_FEATURENAMESTATISTICS.oneofs_by_name['stats'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['string_stats']) -_FEATURENAMESTATISTICS.fields_by_name['string_stats'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['stats'] -_FEATURENAMESTATISTICS.oneofs_by_name['stats'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['bytes_stats']) -_FEATURENAMESTATISTICS.fields_by_name['bytes_stats'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['stats'] -_FEATURENAMESTATISTICS.oneofs_by_name['stats'].fields.append( - _FEATURENAMESTATISTICS.fields_by_name['struct_stats']) -_FEATURENAMESTATISTICS.fields_by_name['struct_stats'].containing_oneof = _FEATURENAMESTATISTICS.oneofs_by_name['stats'] -_CUSTOMSTATISTIC.fields_by_name['histogram'].message_type = _HISTOGRAM -_CUSTOMSTATISTIC.fields_by_name['rank_histogram'].message_type = _RANKHISTOGRAM -_CUSTOMSTATISTIC.oneofs_by_name['val'].fields.append( - _CUSTOMSTATISTIC.fields_by_name['num']) -_CUSTOMSTATISTIC.fields_by_name['num'].containing_oneof = _CUSTOMSTATISTIC.oneofs_by_name['val'] -_CUSTOMSTATISTIC.oneofs_by_name['val'].fields.append( - _CUSTOMSTATISTIC.fields_by_name['str']) -_CUSTOMSTATISTIC.fields_by_name['str'].containing_oneof = _CUSTOMSTATISTIC.oneofs_by_name['val'] -_CUSTOMSTATISTIC.oneofs_by_name['val'].fields.append( - _CUSTOMSTATISTIC.fields_by_name['histogram']) -_CUSTOMSTATISTIC.fields_by_name['histogram'].containing_oneof = _CUSTOMSTATISTIC.oneofs_by_name['val'] -_CUSTOMSTATISTIC.oneofs_by_name['val'].fields.append( - _CUSTOMSTATISTIC.fields_by_name['rank_histogram']) -_CUSTOMSTATISTIC.fields_by_name['rank_histogram'].containing_oneof = _CUSTOMSTATISTIC.oneofs_by_name['val'] -_NUMERICSTATISTICS.fields_by_name['common_stats'].message_type = _COMMONSTATISTICS -_NUMERICSTATISTICS.fields_by_name['histograms'].message_type = _HISTOGRAM -_NUMERICSTATISTICS.fields_by_name['weighted_numeric_stats'].message_type = 
_WEIGHTEDNUMERICSTATISTICS -_STRINGSTATISTICS_FREQANDVALUE.containing_type = _STRINGSTATISTICS -_STRINGSTATISTICS.fields_by_name['common_stats'].message_type = _COMMONSTATISTICS -_STRINGSTATISTICS.fields_by_name['top_values'].message_type = _STRINGSTATISTICS_FREQANDVALUE -_STRINGSTATISTICS.fields_by_name['rank_histogram'].message_type = _RANKHISTOGRAM -_STRINGSTATISTICS.fields_by_name['weighted_string_stats'].message_type = _WEIGHTEDSTRINGSTATISTICS -_WEIGHTEDNUMERICSTATISTICS.fields_by_name['histograms'].message_type = _HISTOGRAM -_WEIGHTEDSTRINGSTATISTICS.fields_by_name['top_values'].message_type = _STRINGSTATISTICS_FREQANDVALUE -_WEIGHTEDSTRINGSTATISTICS.fields_by_name['rank_histogram'].message_type = _RANKHISTOGRAM -_BYTESSTATISTICS.fields_by_name['common_stats'].message_type = _COMMONSTATISTICS -_STRUCTSTATISTICS.fields_by_name['common_stats'].message_type = _COMMONSTATISTICS -_COMMONSTATISTICS.fields_by_name['num_values_histogram'].message_type = _HISTOGRAM -_COMMONSTATISTICS.fields_by_name['weighted_common_stats'].message_type = _WEIGHTEDCOMMONSTATISTICS -_COMMONSTATISTICS.fields_by_name['feature_list_length_histogram'].message_type = _HISTOGRAM -_HISTOGRAM_BUCKET.containing_type = _HISTOGRAM -_HISTOGRAM.fields_by_name['buckets'].message_type = _HISTOGRAM_BUCKET -_HISTOGRAM.fields_by_name['type'].enum_type = _HISTOGRAM_HISTOGRAMTYPE -_HISTOGRAM_HISTOGRAMTYPE.containing_type = _HISTOGRAM -_RANKHISTOGRAM_BUCKET.containing_type = _RANKHISTOGRAM -_RANKHISTOGRAM.fields_by_name['buckets'].message_type = _RANKHISTOGRAM_BUCKET -DESCRIPTOR.message_types_by_name['DatasetFeatureStatisticsList'] = _DATASETFEATURESTATISTICSLIST -DESCRIPTOR.message_types_by_name['DatasetFeatureStatistics'] = _DATASETFEATURESTATISTICS -DESCRIPTOR.message_types_by_name['CrossFeatureStatistics'] = _CROSSFEATURESTATISTICS -DESCRIPTOR.message_types_by_name['NumericCrossStatistics'] = _NUMERICCROSSSTATISTICS -DESCRIPTOR.message_types_by_name['CategoricalCrossStatistics'] = _CATEGORICALCROSSSTATISTICS -DESCRIPTOR.message_types_by_name['LiftStatistics'] = _LIFTSTATISTICS -DESCRIPTOR.message_types_by_name['LiftSeries'] = _LIFTSERIES -DESCRIPTOR.message_types_by_name['FeatureNameStatistics'] = _FEATURENAMESTATISTICS -DESCRIPTOR.message_types_by_name['WeightedCommonStatistics'] = _WEIGHTEDCOMMONSTATISTICS -DESCRIPTOR.message_types_by_name['CustomStatistic'] = _CUSTOMSTATISTIC -DESCRIPTOR.message_types_by_name['NumericStatistics'] = _NUMERICSTATISTICS -DESCRIPTOR.message_types_by_name['StringStatistics'] = _STRINGSTATISTICS -DESCRIPTOR.message_types_by_name['WeightedNumericStatistics'] = _WEIGHTEDNUMERICSTATISTICS -DESCRIPTOR.message_types_by_name['WeightedStringStatistics'] = _WEIGHTEDSTRINGSTATISTICS -DESCRIPTOR.message_types_by_name['BytesStatistics'] = _BYTESSTATISTICS -DESCRIPTOR.message_types_by_name['StructStatistics'] = _STRUCTSTATISTICS -DESCRIPTOR.message_types_by_name['CommonStatistics'] = _COMMONSTATISTICS -DESCRIPTOR.message_types_by_name['Histogram'] = _HISTOGRAM -DESCRIPTOR.message_types_by_name['RankHistogram'] = _RANKHISTOGRAM -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DatasetFeatureStatisticsList = _reflection.GeneratedProtocolMessageType('DatasetFeatureStatisticsList', (_message.Message,), { - 'DESCRIPTOR' : _DATASETFEATURESTATISTICSLIST, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.DatasetFeatureStatisticsList) - }) -_sym_db.RegisterMessage(DatasetFeatureStatisticsList) - -DatasetFeatureStatistics = 
_reflection.GeneratedProtocolMessageType('DatasetFeatureStatistics', (_message.Message,), { - 'DESCRIPTOR' : _DATASETFEATURESTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.DatasetFeatureStatistics) - }) -_sym_db.RegisterMessage(DatasetFeatureStatistics) - -CrossFeatureStatistics = _reflection.GeneratedProtocolMessageType('CrossFeatureStatistics', (_message.Message,), { - 'DESCRIPTOR' : _CROSSFEATURESTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.CrossFeatureStatistics) - }) -_sym_db.RegisterMessage(CrossFeatureStatistics) - -NumericCrossStatistics = _reflection.GeneratedProtocolMessageType('NumericCrossStatistics', (_message.Message,), { - 'DESCRIPTOR' : _NUMERICCROSSSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NumericCrossStatistics) - }) -_sym_db.RegisterMessage(NumericCrossStatistics) - -CategoricalCrossStatistics = _reflection.GeneratedProtocolMessageType('CategoricalCrossStatistics', (_message.Message,), { - 'DESCRIPTOR' : _CATEGORICALCROSSSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.CategoricalCrossStatistics) - }) -_sym_db.RegisterMessage(CategoricalCrossStatistics) - -LiftStatistics = _reflection.GeneratedProtocolMessageType('LiftStatistics', (_message.Message,), { - 'DESCRIPTOR' : _LIFTSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.LiftStatistics) - }) -_sym_db.RegisterMessage(LiftStatistics) - -LiftSeries = _reflection.GeneratedProtocolMessageType('LiftSeries', (_message.Message,), { - - 'Bucket' : _reflection.GeneratedProtocolMessageType('Bucket', (_message.Message,), { - 'DESCRIPTOR' : _LIFTSERIES_BUCKET, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.LiftSeries.Bucket) - }) - , - - 'LiftValue' : _reflection.GeneratedProtocolMessageType('LiftValue', (_message.Message,), { - 'DESCRIPTOR' : _LIFTSERIES_LIFTVALUE, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.LiftSeries.LiftValue) - }) - , - 'DESCRIPTOR' : _LIFTSERIES, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.LiftSeries) - }) -_sym_db.RegisterMessage(LiftSeries) -_sym_db.RegisterMessage(LiftSeries.Bucket) -_sym_db.RegisterMessage(LiftSeries.LiftValue) - -FeatureNameStatistics = _reflection.GeneratedProtocolMessageType('FeatureNameStatistics', (_message.Message,), { - 'DESCRIPTOR' : _FEATURENAMESTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.FeatureNameStatistics) - }) -_sym_db.RegisterMessage(FeatureNameStatistics) - -WeightedCommonStatistics = _reflection.GeneratedProtocolMessageType('WeightedCommonStatistics', (_message.Message,), { - 'DESCRIPTOR' : _WEIGHTEDCOMMONSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.WeightedCommonStatistics) - }) -_sym_db.RegisterMessage(WeightedCommonStatistics) - -CustomStatistic = 
_reflection.GeneratedProtocolMessageType('CustomStatistic', (_message.Message,), { - 'DESCRIPTOR' : _CUSTOMSTATISTIC, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.CustomStatistic) - }) -_sym_db.RegisterMessage(CustomStatistic) - -NumericStatistics = _reflection.GeneratedProtocolMessageType('NumericStatistics', (_message.Message,), { - 'DESCRIPTOR' : _NUMERICSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NumericStatistics) - }) -_sym_db.RegisterMessage(NumericStatistics) - -StringStatistics = _reflection.GeneratedProtocolMessageType('StringStatistics', (_message.Message,), { - - 'FreqAndValue' : _reflection.GeneratedProtocolMessageType('FreqAndValue', (_message.Message,), { - 'DESCRIPTOR' : _STRINGSTATISTICS_FREQANDVALUE, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.StringStatistics.FreqAndValue) - }) - , - 'DESCRIPTOR' : _STRINGSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.StringStatistics) - }) -_sym_db.RegisterMessage(StringStatistics) -_sym_db.RegisterMessage(StringStatistics.FreqAndValue) - -WeightedNumericStatistics = _reflection.GeneratedProtocolMessageType('WeightedNumericStatistics', (_message.Message,), { - 'DESCRIPTOR' : _WEIGHTEDNUMERICSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.WeightedNumericStatistics) - }) -_sym_db.RegisterMessage(WeightedNumericStatistics) - -WeightedStringStatistics = _reflection.GeneratedProtocolMessageType('WeightedStringStatistics', (_message.Message,), { - 'DESCRIPTOR' : _WEIGHTEDSTRINGSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.WeightedStringStatistics) - }) -_sym_db.RegisterMessage(WeightedStringStatistics) - -BytesStatistics = _reflection.GeneratedProtocolMessageType('BytesStatistics', (_message.Message,), { - 'DESCRIPTOR' : _BYTESSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.BytesStatistics) - }) -_sym_db.RegisterMessage(BytesStatistics) - -StructStatistics = _reflection.GeneratedProtocolMessageType('StructStatistics', (_message.Message,), { - 'DESCRIPTOR' : _STRUCTSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.StructStatistics) - }) -_sym_db.RegisterMessage(StructStatistics) - -CommonStatistics = _reflection.GeneratedProtocolMessageType('CommonStatistics', (_message.Message,), { - 'DESCRIPTOR' : _COMMONSTATISTICS, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.CommonStatistics) - }) -_sym_db.RegisterMessage(CommonStatistics) - -Histogram = _reflection.GeneratedProtocolMessageType('Histogram', (_message.Message,), { - - 'Bucket' : _reflection.GeneratedProtocolMessageType('Bucket', (_message.Message,), { - 'DESCRIPTOR' : _HISTOGRAM_BUCKET, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Histogram.Bucket) - }) - , - 'DESCRIPTOR' : _HISTOGRAM, - '__module__' : 
'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Histogram) - }) -_sym_db.RegisterMessage(Histogram) -_sym_db.RegisterMessage(Histogram.Bucket) - -RankHistogram = _reflection.GeneratedProtocolMessageType('RankHistogram', (_message.Message,), { - - 'Bucket' : _reflection.GeneratedProtocolMessageType('Bucket', (_message.Message,), { - 'DESCRIPTOR' : _RANKHISTOGRAM_BUCKET, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.RankHistogram.Bucket) - }) - , - 'DESCRIPTOR' : _RANKHISTOGRAM, - '__module__' : 'tensorflow_metadata.proto.v0.statistics_pb2' - # @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.RankHistogram) - }) -_sym_db.RegisterMessage(RankHistogram) -_sym_db.RegisterMessage(RankHistogram.Bucket) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.pyi b/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.pyi deleted file mode 100644 index e2e3923bf0..0000000000 --- a/sdk/python/tensorflow_metadata/proto/v0/statistics_pb2.pyi +++ /dev/null @@ -1,593 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -from google.protobuf.descriptor import ( - Descriptor as google___protobuf___descriptor___Descriptor, - EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, - FileDescriptor as google___protobuf___descriptor___FileDescriptor, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, -) - -from google.protobuf.internal.enum_type_wrapper import ( - _EnumTypeWrapper as google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from tensorflow_metadata.proto.v0.path_pb2 import ( - Path as tensorflow_metadata___proto___v0___path_pb2___Path, -) - -from typing import ( - Iterable as typing___Iterable, - NewType as typing___NewType, - Optional as typing___Optional, - Text as typing___Text, - cast as typing___cast, - overload as typing___overload, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -builtin___bool = bool -builtin___bytes = bytes -builtin___float = float -builtin___int = int - - -DESCRIPTOR: google___protobuf___descriptor___FileDescriptor = ... - -class DatasetFeatureStatisticsList(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def datasets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___DatasetFeatureStatistics]: ... - - def __init__(self, - *, - datasets : typing___Optional[typing___Iterable[type___DatasetFeatureStatistics]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"datasets",b"datasets"]) -> None: ... -type___DatasetFeatureStatisticsList = DatasetFeatureStatisticsList - -class DatasetFeatureStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - num_examples: builtin___int = ... - weighted_num_examples: builtin___float = ... - - @property - def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___FeatureNameStatistics]: ... 
- - @property - def cross_features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___CrossFeatureStatistics]: ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - num_examples : typing___Optional[builtin___int] = None, - weighted_num_examples : typing___Optional[builtin___float] = None, - features : typing___Optional[typing___Iterable[type___FeatureNameStatistics]] = None, - cross_features : typing___Optional[typing___Iterable[type___CrossFeatureStatistics]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"cross_features",b"cross_features",u"features",b"features",u"name",b"name",u"num_examples",b"num_examples",u"weighted_num_examples",b"weighted_num_examples"]) -> None: ... -type___DatasetFeatureStatistics = DatasetFeatureStatistics - -class CrossFeatureStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - count: builtin___int = ... - - @property - def path_x(self) -> tensorflow_metadata___proto___v0___path_pb2___Path: ... - - @property - def path_y(self) -> tensorflow_metadata___proto___v0___path_pb2___Path: ... - - @property - def num_cross_stats(self) -> type___NumericCrossStatistics: ... - - @property - def categorical_cross_stats(self) -> type___CategoricalCrossStatistics: ... - - def __init__(self, - *, - path_x : typing___Optional[tensorflow_metadata___proto___v0___path_pb2___Path] = None, - path_y : typing___Optional[tensorflow_metadata___proto___v0___path_pb2___Path] = None, - count : typing___Optional[builtin___int] = None, - num_cross_stats : typing___Optional[type___NumericCrossStatistics] = None, - categorical_cross_stats : typing___Optional[type___CategoricalCrossStatistics] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"categorical_cross_stats",b"categorical_cross_stats",u"cross_stats",b"cross_stats",u"num_cross_stats",b"num_cross_stats",u"path_x",b"path_x",u"path_y",b"path_y"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"categorical_cross_stats",b"categorical_cross_stats",u"count",b"count",u"cross_stats",b"cross_stats",u"num_cross_stats",b"num_cross_stats",u"path_x",b"path_x",u"path_y",b"path_y"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"cross_stats",b"cross_stats"]) -> typing_extensions___Literal["num_cross_stats","categorical_cross_stats"]: ... -type___CrossFeatureStatistics = CrossFeatureStatistics - -class NumericCrossStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - correlation: builtin___float = ... - covariance: builtin___float = ... - - def __init__(self, - *, - correlation : typing___Optional[builtin___float] = None, - covariance : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"correlation",b"correlation",u"covariance",b"covariance"]) -> None: ... -type___NumericCrossStatistics = NumericCrossStatistics - -class CategoricalCrossStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def lift(self) -> type___LiftStatistics: ... - - def __init__(self, - *, - lift : typing___Optional[type___LiftStatistics] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"lift",b"lift"]) -> builtin___bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"lift",b"lift"]) -> None: ... -type___CategoricalCrossStatistics = CategoricalCrossStatistics - -class LiftStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def lift_series(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___LiftSeries]: ... - - @property - def weighted_lift_series(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___LiftSeries]: ... - - def __init__(self, - *, - lift_series : typing___Optional[typing___Iterable[type___LiftSeries]] = None, - weighted_lift_series : typing___Optional[typing___Iterable[type___LiftSeries]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"lift_series",b"lift_series",u"weighted_lift_series",b"weighted_lift_series"]) -> None: ... -type___LiftStatistics = LiftStatistics - -class LiftSeries(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Bucket(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - low_value: builtin___float = ... - high_value: builtin___float = ... - - def __init__(self, - *, - low_value : typing___Optional[builtin___float] = None, - high_value : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"high_value",b"high_value",u"low_value",b"low_value"]) -> None: ... - type___Bucket = Bucket - - class LiftValue(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - x_int: builtin___int = ... - x_string: typing___Text = ... - lift: builtin___float = ... - x_count: builtin___int = ... - weighted_x_count: builtin___float = ... - x_and_y_count: builtin___int = ... - weighted_x_and_y_count: builtin___float = ... - - def __init__(self, - *, - x_int : typing___Optional[builtin___int] = None, - x_string : typing___Optional[typing___Text] = None, - lift : typing___Optional[builtin___float] = None, - x_count : typing___Optional[builtin___int] = None, - weighted_x_count : typing___Optional[builtin___float] = None, - x_and_y_count : typing___Optional[builtin___int] = None, - weighted_x_and_y_count : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"weighted_x_and_y_count",b"weighted_x_and_y_count",u"weighted_x_count",b"weighted_x_count",u"x_and_y_count",b"x_and_y_count",u"x_and_y_count_value",b"x_and_y_count_value",u"x_count",b"x_count",u"x_count_value",b"x_count_value",u"x_int",b"x_int",u"x_string",b"x_string",u"x_value",b"x_value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"lift",b"lift",u"weighted_x_and_y_count",b"weighted_x_and_y_count",u"weighted_x_count",b"weighted_x_count",u"x_and_y_count",b"x_and_y_count",u"x_and_y_count_value",b"x_and_y_count_value",u"x_count",b"x_count",u"x_count_value",b"x_count_value",u"x_int",b"x_int",u"x_string",b"x_string",u"x_value",b"x_value"]) -> None: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"x_and_y_count_value",b"x_and_y_count_value"]) -> typing_extensions___Literal["x_and_y_count","weighted_x_and_y_count"]: ... 
- @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"x_count_value",b"x_count_value"]) -> typing_extensions___Literal["x_count","weighted_x_count"]: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"x_value",b"x_value"]) -> typing_extensions___Literal["x_int","x_string"]: ... - type___LiftValue = LiftValue - - y_int: builtin___int = ... - y_string: typing___Text = ... - y_count: builtin___int = ... - weighted_y_count: builtin___float = ... - - @property - def y_bucket(self) -> type___LiftSeries.Bucket: ... - - @property - def lift_values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___LiftSeries.LiftValue]: ... - - def __init__(self, - *, - y_int : typing___Optional[builtin___int] = None, - y_string : typing___Optional[typing___Text] = None, - y_bucket : typing___Optional[type___LiftSeries.Bucket] = None, - y_count : typing___Optional[builtin___int] = None, - weighted_y_count : typing___Optional[builtin___float] = None, - lift_values : typing___Optional[typing___Iterable[type___LiftSeries.LiftValue]] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"weighted_y_count",b"weighted_y_count",u"y_bucket",b"y_bucket",u"y_count",b"y_count",u"y_count_value",b"y_count_value",u"y_int",b"y_int",u"y_string",b"y_string",u"y_value",b"y_value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"lift_values",b"lift_values",u"weighted_y_count",b"weighted_y_count",u"y_bucket",b"y_bucket",u"y_count",b"y_count",u"y_count_value",b"y_count_value",u"y_int",b"y_int",u"y_string",b"y_string",u"y_value",b"y_value"]) -> None: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"y_count_value",b"y_count_value"]) -> typing_extensions___Literal["y_count","weighted_y_count"]: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"y_value",b"y_value"]) -> typing_extensions___Literal["y_int","y_string","y_bucket"]: ... -type___LiftSeries = LiftSeries - -class FeatureNameStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - TypeValue = typing___NewType('TypeValue', builtin___int) - type___TypeValue = TypeValue - Type: _Type - class _Type(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[FeatureNameStatistics.TypeValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - INT = typing___cast(FeatureNameStatistics.TypeValue, 0) - FLOAT = typing___cast(FeatureNameStatistics.TypeValue, 1) - STRING = typing___cast(FeatureNameStatistics.TypeValue, 2) - BYTES = typing___cast(FeatureNameStatistics.TypeValue, 3) - STRUCT = typing___cast(FeatureNameStatistics.TypeValue, 4) - INT = typing___cast(FeatureNameStatistics.TypeValue, 0) - FLOAT = typing___cast(FeatureNameStatistics.TypeValue, 1) - STRING = typing___cast(FeatureNameStatistics.TypeValue, 2) - BYTES = typing___cast(FeatureNameStatistics.TypeValue, 3) - STRUCT = typing___cast(FeatureNameStatistics.TypeValue, 4) - - name: typing___Text = ... - type: type___FeatureNameStatistics.TypeValue = ... - - @property - def path(self) -> tensorflow_metadata___proto___v0___path_pb2___Path: ... - - @property - def num_stats(self) -> type___NumericStatistics: ... - - @property - def string_stats(self) -> type___StringStatistics: ... - - @property - def bytes_stats(self) -> type___BytesStatistics: ... 
- - @property - def struct_stats(self) -> type___StructStatistics: ... - - @property - def custom_stats(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___CustomStatistic]: ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - path : typing___Optional[tensorflow_metadata___proto___v0___path_pb2___Path] = None, - type : typing___Optional[type___FeatureNameStatistics.TypeValue] = None, - num_stats : typing___Optional[type___NumericStatistics] = None, - string_stats : typing___Optional[type___StringStatistics] = None, - bytes_stats : typing___Optional[type___BytesStatistics] = None, - struct_stats : typing___Optional[type___StructStatistics] = None, - custom_stats : typing___Optional[typing___Iterable[type___CustomStatistic]] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"bytes_stats",b"bytes_stats",u"field_id",b"field_id",u"name",b"name",u"num_stats",b"num_stats",u"path",b"path",u"stats",b"stats",u"string_stats",b"string_stats",u"struct_stats",b"struct_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"bytes_stats",b"bytes_stats",u"custom_stats",b"custom_stats",u"field_id",b"field_id",u"name",b"name",u"num_stats",b"num_stats",u"path",b"path",u"stats",b"stats",u"string_stats",b"string_stats",u"struct_stats",b"struct_stats",u"type",b"type"]) -> None: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"field_id",b"field_id"]) -> typing_extensions___Literal["name","path"]: ... - @typing___overload - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"stats",b"stats"]) -> typing_extensions___Literal["num_stats","string_stats","bytes_stats","struct_stats"]: ... -type___FeatureNameStatistics = FeatureNameStatistics - -class WeightedCommonStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - num_non_missing: builtin___float = ... - num_missing: builtin___float = ... - avg_num_values: builtin___float = ... - tot_num_values: builtin___float = ... - - def __init__(self, - *, - num_non_missing : typing___Optional[builtin___float] = None, - num_missing : typing___Optional[builtin___float] = None, - avg_num_values : typing___Optional[builtin___float] = None, - tot_num_values : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"avg_num_values",b"avg_num_values",u"num_missing",b"num_missing",u"num_non_missing",b"num_non_missing",u"tot_num_values",b"tot_num_values"]) -> None: ... -type___WeightedCommonStatistics = WeightedCommonStatistics - -class CustomStatistic(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - name: typing___Text = ... - num: builtin___float = ... - str: typing___Text = ... - - @property - def histogram(self) -> type___Histogram: ... - - @property - def rank_histogram(self) -> type___RankHistogram: ... - - def __init__(self, - *, - name : typing___Optional[typing___Text] = None, - num : typing___Optional[builtin___float] = None, - str : typing___Optional[typing___Text] = None, - histogram : typing___Optional[type___Histogram] = None, - rank_histogram : typing___Optional[type___RankHistogram] = None, - ) -> None: ... 
- def HasField(self, field_name: typing_extensions___Literal[u"histogram",b"histogram",u"num",b"num",u"rank_histogram",b"rank_histogram",u"str",b"str",u"val",b"val"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"histogram",b"histogram",u"name",b"name",u"num",b"num",u"rank_histogram",b"rank_histogram",u"str",b"str",u"val",b"val"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"val",b"val"]) -> typing_extensions___Literal["num","str","histogram","rank_histogram"]: ... -type___CustomStatistic = CustomStatistic - -class NumericStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - mean: builtin___float = ... - std_dev: builtin___float = ... - num_zeros: builtin___int = ... - min: builtin___float = ... - median: builtin___float = ... - max: builtin___float = ... - - @property - def common_stats(self) -> type___CommonStatistics: ... - - @property - def histograms(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Histogram]: ... - - @property - def weighted_numeric_stats(self) -> type___WeightedNumericStatistics: ... - - def __init__(self, - *, - common_stats : typing___Optional[type___CommonStatistics] = None, - mean : typing___Optional[builtin___float] = None, - std_dev : typing___Optional[builtin___float] = None, - num_zeros : typing___Optional[builtin___int] = None, - min : typing___Optional[builtin___float] = None, - median : typing___Optional[builtin___float] = None, - max : typing___Optional[builtin___float] = None, - histograms : typing___Optional[typing___Iterable[type___Histogram]] = None, - weighted_numeric_stats : typing___Optional[type___WeightedNumericStatistics] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats",u"weighted_numeric_stats",b"weighted_numeric_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats",u"histograms",b"histograms",u"max",b"max",u"mean",b"mean",u"median",b"median",u"min",b"min",u"num_zeros",b"num_zeros",u"std_dev",b"std_dev",u"weighted_numeric_stats",b"weighted_numeric_stats"]) -> None: ... -type___NumericStatistics = NumericStatistics - -class StringStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class FreqAndValue(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - value: typing___Text = ... - frequency: builtin___float = ... - - def __init__(self, - *, - value : typing___Optional[typing___Text] = None, - frequency : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"frequency",b"frequency",u"value",b"value"]) -> None: ... - type___FreqAndValue = FreqAndValue - - unique: builtin___int = ... - avg_length: builtin___float = ... - vocabulary_file: typing___Text = ... - - @property - def common_stats(self) -> type___CommonStatistics: ... - - @property - def top_values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___StringStatistics.FreqAndValue]: ... - - @property - def rank_histogram(self) -> type___RankHistogram: ... - - @property - def weighted_string_stats(self) -> type___WeightedStringStatistics: ... 
- - def __init__(self, - *, - common_stats : typing___Optional[type___CommonStatistics] = None, - unique : typing___Optional[builtin___int] = None, - top_values : typing___Optional[typing___Iterable[type___StringStatistics.FreqAndValue]] = None, - avg_length : typing___Optional[builtin___float] = None, - rank_histogram : typing___Optional[type___RankHistogram] = None, - weighted_string_stats : typing___Optional[type___WeightedStringStatistics] = None, - vocabulary_file : typing___Optional[typing___Text] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats",u"rank_histogram",b"rank_histogram",u"weighted_string_stats",b"weighted_string_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"avg_length",b"avg_length",u"common_stats",b"common_stats",u"rank_histogram",b"rank_histogram",u"top_values",b"top_values",u"unique",b"unique",u"vocabulary_file",b"vocabulary_file",u"weighted_string_stats",b"weighted_string_stats"]) -> None: ... -type___StringStatistics = StringStatistics - -class WeightedNumericStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - mean: builtin___float = ... - std_dev: builtin___float = ... - median: builtin___float = ... - - @property - def histograms(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Histogram]: ... - - def __init__(self, - *, - mean : typing___Optional[builtin___float] = None, - std_dev : typing___Optional[builtin___float] = None, - median : typing___Optional[builtin___float] = None, - histograms : typing___Optional[typing___Iterable[type___Histogram]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"histograms",b"histograms",u"mean",b"mean",u"median",b"median",u"std_dev",b"std_dev"]) -> None: ... -type___WeightedNumericStatistics = WeightedNumericStatistics - -class WeightedStringStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def top_values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___StringStatistics.FreqAndValue]: ... - - @property - def rank_histogram(self) -> type___RankHistogram: ... - - def __init__(self, - *, - top_values : typing___Optional[typing___Iterable[type___StringStatistics.FreqAndValue]] = None, - rank_histogram : typing___Optional[type___RankHistogram] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"rank_histogram",b"rank_histogram"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"rank_histogram",b"rank_histogram",u"top_values",b"top_values"]) -> None: ... -type___WeightedStringStatistics = WeightedStringStatistics - -class BytesStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - unique: builtin___int = ... - avg_num_bytes: builtin___float = ... - min_num_bytes: builtin___float = ... - max_num_bytes: builtin___float = ... - - @property - def common_stats(self) -> type___CommonStatistics: ... 
- - def __init__(self, - *, - common_stats : typing___Optional[type___CommonStatistics] = None, - unique : typing___Optional[builtin___int] = None, - avg_num_bytes : typing___Optional[builtin___float] = None, - min_num_bytes : typing___Optional[builtin___float] = None, - max_num_bytes : typing___Optional[builtin___float] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"avg_num_bytes",b"avg_num_bytes",u"common_stats",b"common_stats",u"max_num_bytes",b"max_num_bytes",u"min_num_bytes",b"min_num_bytes",u"unique",b"unique"]) -> None: ... -type___BytesStatistics = BytesStatistics - -class StructStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def common_stats(self) -> type___CommonStatistics: ... - - def __init__(self, - *, - common_stats : typing___Optional[type___CommonStatistics] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"common_stats",b"common_stats"]) -> None: ... -type___StructStatistics = StructStatistics - -class CommonStatistics(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - num_non_missing: builtin___int = ... - num_missing: builtin___int = ... - min_num_values: builtin___int = ... - max_num_values: builtin___int = ... - avg_num_values: builtin___float = ... - tot_num_values: builtin___int = ... - - @property - def num_values_histogram(self) -> type___Histogram: ... - - @property - def weighted_common_stats(self) -> type___WeightedCommonStatistics: ... - - @property - def feature_list_length_histogram(self) -> type___Histogram: ... - - def __init__(self, - *, - num_non_missing : typing___Optional[builtin___int] = None, - num_missing : typing___Optional[builtin___int] = None, - min_num_values : typing___Optional[builtin___int] = None, - max_num_values : typing___Optional[builtin___int] = None, - avg_num_values : typing___Optional[builtin___float] = None, - tot_num_values : typing___Optional[builtin___int] = None, - num_values_histogram : typing___Optional[type___Histogram] = None, - weighted_common_stats : typing___Optional[type___WeightedCommonStatistics] = None, - feature_list_length_histogram : typing___Optional[type___Histogram] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal[u"feature_list_length_histogram",b"feature_list_length_histogram",u"num_values_histogram",b"num_values_histogram",u"weighted_common_stats",b"weighted_common_stats"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"avg_num_values",b"avg_num_values",u"feature_list_length_histogram",b"feature_list_length_histogram",u"max_num_values",b"max_num_values",u"min_num_values",b"min_num_values",u"num_missing",b"num_missing",u"num_non_missing",b"num_non_missing",u"num_values_histogram",b"num_values_histogram",u"tot_num_values",b"tot_num_values",u"weighted_common_stats",b"weighted_common_stats"]) -> None: ... -type___CommonStatistics = CommonStatistics - -class Histogram(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
- HistogramTypeValue = typing___NewType('HistogramTypeValue', builtin___int) - type___HistogramTypeValue = HistogramTypeValue - HistogramType: _HistogramType - class _HistogramType(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[Histogram.HistogramTypeValue]): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - STANDARD = typing___cast(Histogram.HistogramTypeValue, 0) - QUANTILES = typing___cast(Histogram.HistogramTypeValue, 1) - STANDARD = typing___cast(Histogram.HistogramTypeValue, 0) - QUANTILES = typing___cast(Histogram.HistogramTypeValue, 1) - - class Bucket(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - low_value: builtin___float = ... - high_value: builtin___float = ... - sample_count: builtin___float = ... - - def __init__(self, - *, - low_value : typing___Optional[builtin___float] = None, - high_value : typing___Optional[builtin___float] = None, - sample_count : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"high_value",b"high_value",u"low_value",b"low_value",u"sample_count",b"sample_count"]) -> None: ... - type___Bucket = Bucket - - num_nan: builtin___int = ... - num_undefined: builtin___int = ... - type: type___Histogram.HistogramTypeValue = ... - name: typing___Text = ... - - @property - def buckets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Histogram.Bucket]: ... - - def __init__(self, - *, - num_nan : typing___Optional[builtin___int] = None, - num_undefined : typing___Optional[builtin___int] = None, - buckets : typing___Optional[typing___Iterable[type___Histogram.Bucket]] = None, - type : typing___Optional[type___Histogram.HistogramTypeValue] = None, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"buckets",b"buckets",u"name",b"name",u"num_nan",b"num_nan",u"num_undefined",b"num_undefined",u"type",b"type"]) -> None: ... -type___Histogram = Histogram - -class RankHistogram(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Bucket(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - low_rank: builtin___int = ... - high_rank: builtin___int = ... - label: typing___Text = ... - sample_count: builtin___float = ... - - def __init__(self, - *, - low_rank : typing___Optional[builtin___int] = None, - high_rank : typing___Optional[builtin___int] = None, - label : typing___Optional[typing___Text] = None, - sample_count : typing___Optional[builtin___float] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"high_rank",b"high_rank",u"label",b"label",u"low_rank",b"low_rank",u"sample_count",b"sample_count"]) -> None: ... - type___Bucket = Bucket - - name: typing___Text = ... - - @property - def buckets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___RankHistogram.Bucket]: ... - - def __init__(self, - *, - buckets : typing___Optional[typing___Iterable[type___RankHistogram.Bucket]] = None, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal[u"buckets",b"buckets",u"name",b"name"]) -> None: ... 
-type___RankHistogram = RankHistogram diff --git a/sdk/python/tests/__init__.py b/sdk/python/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py new file mode 100644 index 0000000000..a29383a5c9 --- /dev/null +++ b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py @@ -0,0 +1,63 @@ +import random +from typing import List + +import pytest + +from feast import FeatureService +from feast.feast_object import FeastObject +from tests.integration.feature_repos.repo_configuration import ( + construct_universal_feature_views, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + + +@pytest.mark.benchmark +@pytest.mark.integration +def test_online_retrieval(environment, universal_data_sources, benchmark): + fs = environment.feature_store + entities, datasets, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + feature_service = FeatureService( + "convrate_plus100", + features=[feature_views.driver[["conv_rate"]], feature_views.driver_odfv], + ) + + feast_objects: List[FeastObject] = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location(), feature_service]) + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + + sample_drivers = random.sample(entities.driver_vals, 10) + + sample_customers = random.sample(entities.customer_vals, 10) + + entity_rows = [ + {"driver_id": d, "customer_id": c, "val_to_add": 50} + for (d, c) in zip(sample_drivers, sample_customers) + ] + + feature_refs = [ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + "global_stats:num_rides", + "global_stats:avg_ride_length", + ] + unprefixed_feature_refs = [f.rsplit(":", 1)[-1] for f in feature_refs if ":" in f] + # Remove the on demand feature view output features, since they're not present in the source dataframe + unprefixed_feature_refs.remove("conv_rate_plus_100") + unprefixed_feature_refs.remove("conv_rate_plus_val_to_add") + + benchmark( + fs.get_online_features, features=feature_refs, entity_rows=entity_rows, + ) diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 0c94f4d57a..c72a3af754 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2019 The Feast Authors +# Copyright 2021 The Feast Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,12 +11,37 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
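The conftest.py rework below replaces the single --integration switch with four collection flags (--integration, --benchmark, --universal, --goserver). As a minimal sketch of the gating pattern the new pytest_collection_modifyitems hook implements, shown for one flag only (hook names are pytest's standard ones; the real hunk repeats this per marker):

def pytest_addoption(parser):
    parser.addoption("--integration", action="store_true", default=False)

def pytest_collection_modifyitems(config, items):
    # Without the flag, drop marked tests; with it, keep only marked tests.
    integration_tests = [t for t in items if "integration" in t.keywords]
    if not config.getoption("--integration"):
        for t in integration_tests:
            items.remove(t)
    else:
        items[:] = integration_tests

With this in place, a plain pytest run collects only unmarked unit tests, while pytest --integration collects only tests marked @pytest.mark.integration.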
+import logging import multiprocessing +import pathlib +import time from datetime import datetime, timedelta +from multiprocessing import Process from sys import platform +from typing import List import pandas as pd import pytest +from _pytest.nodes import Item +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from feast import FeatureStore +from tests.data.data_creator import create_dataset +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.repo_configuration import ( + FULL_REPO_CONFIGS, + REDIS_CLUSTER_CONFIG, + REDIS_CONFIG, + Environment, + TestData, + construct_test_environment, + construct_universal_test_data, +) + +logger = logging.getLogger(__name__) def pytest_configure(config): @@ -27,6 +52,13 @@ def pytest_configure(config): config.addinivalue_line( "markers", "integration: mark test that has external dependencies" ) + config.addinivalue_line("markers", "benchmark: mark benchmarking tests") + config.addinivalue_line( + "markers", "universal: mark tests that use the universal feature repo" + ) + config.addinivalue_line( + "markers", "goserver: mark tests that use the go feature server" + ) def pytest_addoption(parser): @@ -36,17 +68,55 @@ def pytest_addoption(parser): default=False, help="Run tests with external dependencies", ) + parser.addoption( + "--benchmark", action="store_true", default=False, help="Run benchmark tests", + ) + parser.addoption( + "--universal", action="store_true", default=False, help="Run universal tests", + ) + parser.addoption( + "--goserver", + action="store_true", + default=False, + help="Run tests that use the go feature server", + ) -def pytest_collection_modifyitems(config, items): - if config.getoption("--integration"): - return - skip_integration = pytest.mark.skip( - reason="not running tests with external dependencies" - ) - for item in items: - if "integration" in item.keywords: - item.add_marker(skip_integration) +def pytest_collection_modifyitems(config, items: List[Item]): + should_run_integration = config.getoption("--integration") is True + should_run_benchmark = config.getoption("--benchmark") is True + should_run_universal = config.getoption("--universal") is True + should_run_goserver = config.getoption("--goserver") is True + + integration_tests = [t for t in items if "integration" in t.keywords] + if not should_run_integration: + for t in integration_tests: + items.remove(t) + else: + items.clear() + for t in integration_tests: + items.append(t) + + benchmark_tests = [t for t in items if "benchmark" in t.keywords] + if not should_run_benchmark: + for t in benchmark_tests: + items.remove(t) + else: + items.clear() + for t in benchmark_tests: + items.append(t) + + universal_tests = [t for t in items if "universal" in t.keywords] + if should_run_universal: + items.clear() + for t in universal_tests: + items.append(t) + + goserver_tests = [t for t in items if "goserver" in t.keywords] + if should_run_goserver: + items.clear() + for t in goserver_tests: + items.append(t) @pytest.fixture @@ -54,7 +124,7 @@ def simple_dataset_1() -> pd.DataFrame: now = datetime.utcnow() ts = pd.Timestamp(now).round("ms") data = { - "id": [1, 2, 1, 3, 3], + "id_join_key": [1, 2, 1, 3, 3], "float_col": [0.1, 0.2, 0.3, 4, 5], "int64_col": [1, 2, 3, 4, 5], "string_col": ["a", "b", "c", "d", "e"], @@ -74,7 +144,7 @@ def simple_dataset_2() -> pd.DataFrame: now = datetime.utcnow() ts = 
pd.Timestamp(now).round("ms") data = { - "id": ["a", "b", "c", "d", "e"], + "id_join_key": ["a", "b", "c", "d", "e"], "float_col": [0.1, 0.2, 0.3, 4, 5], "int64_col": [1, 2, 3, 4, 5], "string_col": ["a", "b", "c", "d", "e"], @@ -87,3 +157,127 @@ def simple_dataset_2() -> pd.DataFrame: ], } return pd.DataFrame.from_dict(data) + + +def start_test_local_server(repo_path: str, port: int): + fs = FeatureStore(repo_path) + fs.serve("localhost", port, no_access_log=True) + + +class TrinoContainerSingleton: + current_file = pathlib.Path(__file__).resolve() + catalog_dir = current_file.parent.joinpath( + "integration/feature_repos/universal/data_sources/catalog" + ) + container = None + is_running = False + + @classmethod + def get_singleton(cls): + if not cls.is_running: + cls.container = ( + DockerContainer("trinodb/trino:376") + .with_volume_mapping(cls.catalog_dir, "/etc/catalog/") + .with_exposed_ports("8080") + ) + + cls.container.start() + log_string_to_wait_for = "SERVER STARTED" + wait_for_logs( + container=cls.container, predicate=log_string_to_wait_for, timeout=30 + ) + cls.is_running = True + return cls.container + + @classmethod + def teardown(cls): + if cls.container: + cls.container.stop() + + +@pytest.fixture( + params=FULL_REPO_CONFIGS, scope="session", ids=[str(c) for c in FULL_REPO_CONFIGS] +) +def environment(request, worker_id: str): + if "TrinoSourceCreator" in request.param.offline_store_creator.__name__: + e = construct_test_environment( + request.param, + worker_id=worker_id, + offline_container=TrinoContainerSingleton.get_singleton(), + ) + else: + e = construct_test_environment(request.param, worker_id=worker_id) + proc = Process( + target=start_test_local_server, + args=(e.feature_store.repo_path, e.get_local_server_port()), + daemon=True, + ) + if e.python_feature_server and e.test_repo_config.provider == "local": + proc.start() + # Wait for server to start + time.sleep(3) + + def cleanup(): + e.feature_store.teardown() + if proc.is_alive(): + proc.kill() + if e.online_store_creator: + e.online_store_creator.teardown() + + request.addfinalizer(cleanup) + + return e + + +@pytest.fixture( + params=[REDIS_CONFIG, REDIS_CLUSTER_CONFIG], + scope="session", + ids=[str(c) for c in [REDIS_CONFIG, REDIS_CLUSTER_CONFIG]], +) +def local_redis_environment(request, worker_id): + e = construct_test_environment( + IntegrationTestRepoConfig(online_store=request.param), worker_id=worker_id + ) + + def cleanup(): + e.feature_store.teardown() + + request.addfinalizer(cleanup) + return e + + +@pytest.fixture(scope="session") +def universal_data_sources(request, environment) -> TestData: + def cleanup(): + # logger.info("Running cleanup in %s, Request: %s", worker_id, request.param) + environment.data_source_creator.teardown() + + request.addfinalizer(cleanup) + return construct_universal_test_data(environment) + + +@pytest.fixture(scope="session") +def redis_universal_data_sources(request, local_redis_environment): + def cleanup(): + # logger.info("Running cleanup in %s, Request: %s", worker_id, request.param) + local_redis_environment.data_source_creator.teardown() + + request.addfinalizer(cleanup) + return construct_universal_test_data(local_redis_environment) + + +@pytest.fixture(scope="session") +def e2e_data_sources(environment: Environment, request): + df = create_dataset() + data_source = environment.data_source_creator.create_data_source( + df, environment.feature_store.project, field_mapping={"ts_1": "ts"}, + ) + + def cleanup(): + environment.data_source_creator.teardown() + if 
environment.online_store_creator: + environment.online_store_creator.teardown() + + request.addfinalizer(cleanup) + + return df, data_source diff --git a/sdk/python/tests/data/__init__.py b/sdk/python/tests/data/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py new file mode 100644 index 0000000000..e08597b67b --- /dev/null +++ b/sdk/python/tests/data/data_creator.py @@ -0,0 +1,79 @@ +from datetime import datetime, timedelta +from typing import Dict, List, Optional + +import pandas as pd +from pytz import timezone, utc + +from feast.value_type import ValueType + + +def create_dataset( + entity_type: ValueType = ValueType.INT32, + feature_dtype: str = None, + feature_is_list: bool = False, + list_has_empty_list: bool = False, +) -> pd.DataFrame: + now = datetime.utcnow().replace(microsecond=0, second=0, minute=0) + ts = pd.Timestamp(now).round("ms") + data = { + "driver_id": get_entities_for_value_type(entity_type), + "value": get_feature_values_for_dtype( + feature_dtype, feature_is_list, list_has_empty_list + ), + "ts_1": [ + ts - timedelta(hours=4), + ts, + ts - timedelta(hours=3), + # Use different time zones to test tz-naive -> tz-aware conversion + (ts - timedelta(hours=4)) + .replace(tzinfo=utc) + .astimezone(tz=timezone("Europe/Berlin")), + (ts - timedelta(hours=1)) + .replace(tzinfo=utc) + .astimezone(tz=timezone("US/Pacific")), + ], + "created_ts": [ts, ts, ts, ts, ts], + } + return pd.DataFrame.from_dict(data) + + +def get_entities_for_value_type(value_type: ValueType) -> List: + value_type_map: Dict[ValueType, List] = { + ValueType.INT32: [1, 2, 1, 3, 3], + ValueType.INT64: [1, 2, 1, 3, 3], + ValueType.FLOAT: [1.0, 2.0, 1.0, 3.0, 3.0], + ValueType.STRING: ["1", "2", "1", "3", "3"], + } + return value_type_map[value_type] + + +def get_feature_values_for_dtype( + dtype: Optional[str], is_list: bool, has_empty_list: bool +) -> List: + if dtype is None: + return [0.1, None, 0.3, 4, 5] + # TODO(adchia): for int columns, consider having a better error when dealing with None values (pandas int dfs can't + # have na) + dtype_map: Dict[str, List] = { + "int32": [1, 2, 3, 4, 5], + "int64": [1, 2, 3, 4, 5], + "float": [1.0, None, 3.0, 4.0, 5.0], + "string": ["1", None, "3", "4", "5"], + "bool": [True, None, False, True, False], + "datetime": [ + datetime(1980, 1, 1), + None, + datetime(1981, 1, 1), + datetime(1982, 1, 1), + datetime(1982, 1, 1), + ], + } + non_list_val = dtype_map[dtype] + if is_list: + # TODO: Add test where all lists are empty and type inference is expected to fail. 
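        # Worked example: with dtype="float", non_list_val == [1.0, None, 3.0, 4.0, 5.0], so
        #   has_empty_list=True  -> [[], [], [], [], [5.0]]  (only the last list is non-empty)
        #   has_empty_list=False -> [[1.0, 1.0], None, [3.0, 3.0], [4.0, 4.0], [5.0, 5.0]]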
+ if has_empty_list: + # Need at least one non-empty element for type inference + return [[] for n in non_list_val[:-1]] + [non_list_val[-1:]] + return [[n, n] if n is not None else None for n in non_list_val] + else: + return non_list_val diff --git a/sdk/python/tests/doctest/test_all.py b/sdk/python/tests/doctest/test_all.py new file mode 100644 index 0000000000..65d5f3da28 --- /dev/null +++ b/sdk/python/tests/doctest/test_all.py @@ -0,0 +1,110 @@ +import doctest +import importlib +import pkgutil +import sys +import unittest + +import feast + + +def setup_feature_store(): + """Prepares the local environment for a FeatureStore docstring test.""" + from datetime import datetime, timedelta + + from feast import Entity, FeatureStore, FeatureView, Field, FileSource, ValueType + from feast.repo_operations import init_repo + from feast.types import Float32, Int64 + + init_repo("feature_repo", "local") + fs = FeatureStore(repo_path="feature_repo") + driver = Entity( + name="driver_id", value_type=ValueType.INT64, description="driver id", + ) + driver_hourly_stats = FileSource( + path="feature_repo/data/driver_stats.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(seconds=86400 * 1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + batch_source=driver_hourly_stats, + ) + fs.apply([driver_hourly_stats_view, driver]) + fs.materialize( + start_date=datetime.utcnow() - timedelta(hours=3), + end_date=datetime.utcnow() - timedelta(minutes=10), + ) + + +def teardown_feature_store(): + """Cleans up the local environment after a FeatureStore docstring test.""" + import shutil + + shutil.rmtree("feature_repo", ignore_errors=True) + + +def test_docstrings(): + """Runs all docstring tests. + + Imports all submodules of the feast package. Checks the submodules for docstring + tests and runs them. Setup functions for a submodule named "feast.x.y.z" should be + defined in this module as a function named "setup_x_y_z". Teardown functions can be + defined similarly. Setup and teardown functions are per-submodule. + """ + successful = True + current_packages = [feast] + failed_cases = [] + + while current_packages: + next_packages = [] + + for package in current_packages: + for _, name, is_pkg in pkgutil.walk_packages(package.__path__): + full_name = package.__name__ + "." + name + + try: + temp_module = importlib.import_module(full_name) + if is_pkg: + next_packages.append(temp_module) + except ModuleNotFoundError: + pass + + # Retrieve the setup and teardown functions defined in this file. + relative_path_from_feast = full_name.split(".", 1)[1] + function_suffix = relative_path_from_feast.replace(".", "_") + setup_function_name = "setup_" + function_suffix + teardown_function_name = "teardown_" + function_suffix + setup_function = globals().get(setup_function_name) + teardown_function = globals().get(teardown_function_name) + + # Execute the test with setup and teardown functions. 
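                # For a module named "feast.infra.local", for example, the lookups
                # above resolve to globals named "setup_infra_local" and
                # "teardown_infra_local", if this file defines them.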
+ try: + if setup_function: + setup_function() + + test_suite = doctest.DocTestSuite( + temp_module, optionflags=doctest.ELLIPSIS, + ) + if test_suite.countTestCases() > 0: + result = unittest.TextTestRunner(sys.stdout).run(test_suite) + if not result.wasSuccessful(): + successful = False + failed_cases.append(result.failures) + except Exception as e: + successful = False + failed_cases.append((full_name, e)) + finally: + if teardown_function: + teardown_function() + + current_packages = next_packages + + if not successful: + raise Exception(f"Docstring tests failed. Failed results: {failed_cases}") diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index 808e8ed54e..d8b6d7c89b 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -1,30 +1,58 @@ from datetime import timedelta -from feast import BigQuerySource, Entity, Feature, FeatureView, ValueType +from feast import ( + BigQuerySource, + Entity, + FeatureService, + FeatureView, + Field, + PushSource, + ValueType, +) +from feast.types import Float32, Int64, String driver_locations_source = BigQuerySource( - table_ref="feast-oss.public.drivers", - event_timestamp_column="event_timestamp", + table="feast-oss.public.drivers", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", +) + +driver_locations_source_query = BigQuerySource( + query="SELECT * from feast-oss.public.drivers", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", +) + +driver_locations_source_query_2 = BigQuerySource( + query="SELECT lat * 2 FROM feast-oss.public.drivers", + timestamp_field="event_timestamp", created_timestamp_column="created_timestamp", ) customer_profile_source = BigQuerySource( - table_ref="feast-oss.public.customers", event_timestamp_column="event_timestamp", + name="customer_profile_source", + table="feast-oss.public.customers", + timestamp_field="event_timestamp", ) customer_driver_combined_source = BigQuerySource( - table_ref="feast-oss.public.customer_driver", - event_timestamp_column="event_timestamp", + table="feast-oss.public.customer_driver", timestamp_field="event_timestamp", +) + +driver_locations_push_source = PushSource( + name="driver_locations_push", batch_source=driver_locations_source, ) driver = Entity( name="driver", # The name is derived from this argument, not object name. + join_keys=["driver_id"], value_type=ValueType.INT64, description="driver id", ) customer = Entity( name="customer", # The name is derived from this argument, not object name. 
+ join_keys=["customer_id"], value_type=ValueType.STRING, ) @@ -33,12 +61,22 @@ name="driver_locations", entities=["driver"], ttl=timedelta(days=1), - features=[ - Feature(name="lat", dtype=ValueType.FLOAT), - Feature(name="lon", dtype=ValueType.STRING), + schema=[Field(name="lat", dtype=Float32), Field(name="lon", dtype=String)], + online=True, + batch_source=driver_locations_source, + tags={}, +) + +pushed_driver_locations = FeatureView( + name="pushed_driver_locations", + entities=["driver"], + ttl=timedelta(days=1), + schema=[ + Field(name="driver_lat", dtype=Float32), + Field(name="driver_long", dtype=String), ], online=True, - input=driver_locations_source, + stream_source=driver_locations_push_source, tags={}, ) @@ -46,13 +84,13 @@ name="customer_profile", entities=["customer"], ttl=timedelta(days=1), - features=[ - Feature(name="avg_orders_day", dtype=ValueType.FLOAT), - Feature(name="name", dtype=ValueType.STRING), - Feature(name="age", dtype=ValueType.INT64), + schema=[ + Field(name="avg_orders_day", dtype=Float32), + Field(name="name", dtype=String), + Field(name="age", dtype=Int64), ], online=True, - input=customer_profile_source, + batch_source=customer_profile_source, tags={}, ) @@ -60,8 +98,15 @@ name="customer_driver_combined", entities=["customer", "driver"], ttl=timedelta(days=1), - features=[Feature(name="trips", dtype=ValueType.INT64)], + schema=[Field(name="trips", dtype=Int64)], online=True, - input=customer_driver_combined_source, + batch_source=customer_driver_combined_source, tags={}, ) + + +all_drivers_feature_service = FeatureService( + name="driver_locations_service", + features=[driver_locations], + tags={"release": "production"}, +) diff --git a/sdk/python/tests/example_repos/example_feature_repo_2.py b/sdk/python/tests/example_repos/example_feature_repo_2.py index 420d71de0a..1ca7cc3805 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_2.py +++ b/sdk/python/tests/example_repos/example_feature_repo_2.py @@ -1,10 +1,11 @@ -from google.protobuf.duration_pb2 import Duration +from datetime import timedelta -from feast import Entity, Feature, FeatureView, FileSource, ValueType +from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast.types import Float32, Int32, Int64 driver_hourly_stats = FileSource( path="%PARQUET_PATH%", # placeholder to be replaced by the test - event_timestamp_column="datetime", + timestamp_field="event_timestamp", created_timestamp_column="created", ) @@ -14,13 +15,34 @@ driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", entities=["driver_id"], - ttl=Duration(seconds=86400 * 1), - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT64), + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), ], online=True, - input=driver_hourly_stats, + batch_source=driver_hourly_stats, + tags={}, +) + + +global_daily_stats = FileSource( + path="%PARQUET_PATH_GLOBAL%", # placeholder to be replaced by the test + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + + +global_stats_feature_view = FeatureView( + name="global_daily_stats", + entities=[], + ttl=timedelta(days=1), + schema=[ + Field(name="num_rides", dtype=Int32), + Field(name="avg_ride_length", dtype=Float32), + ], + online=True, + batch_source=global_daily_stats, tags={}, ) diff --git 
a/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py new file mode 100644 index 0000000000..cbcc3ad172 --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py @@ -0,0 +1,25 @@ +from datetime import timedelta + +from feast import FeatureView, FileSource + +driver_hourly_stats = FileSource( + path="driver_stats.parquet", # this parquet is not real and will not be read +) + +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", # Intentionally use the same FeatureView name + entities=["driver_id"], + online=False, + source=driver_hourly_stats, + ttl=timedelta(days=1), + tags={}, +) + +driver_hourly_stats_view_dup1 = FeatureView( + name="driver_hourly_stats", # Intentionally use the same FeatureView name + entities=["driver_id"], + online=False, + source=driver_hourly_stats, + ttl=timedelta(days=1), + tags={}, +) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py index 10be18ca2e..ba18cf84ba 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py @@ -1,10 +1,11 @@ -from google.protobuf.duration_pb2 import Duration +from datetime import timedelta -from feast import Entity, Feature, FeatureView, FileSource, ValueType +from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast.types import Float32, Int64 driver_hourly_stats = FileSource( path="%PARQUET_PATH%", # placeholder to be replaced by the test - event_timestamp_column="datetime", + timestamp_field="event_timestamp", created_timestamp_column="created", ) @@ -14,20 +15,20 @@ name="driver_id", value_type=ValueType.INT64, description="driver id", - join_key="driver", + join_keys=["driver"], ) driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", entities=["driver_id"], - ttl=Duration(seconds=86400 * 1), - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT64), + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), ], online=True, - input=driver_hourly_stats, + batch_source=driver_hourly_stats, tags={}, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_inference.py b/sdk/python/tests/example_repos/example_feature_repo_with_inference.py deleted file mode 100644 index b46519b468..0000000000 --- a/sdk/python/tests/example_repos/example_feature_repo_with_inference.py +++ /dev/null @@ -1,20 +0,0 @@ -from google.protobuf.duration_pb2 import Duration - -from feast import Entity, FeatureView, FileSource - -driver_hourly_stats = FileSource( - path="%PARQUET_PATH%", # placeholder to be replaced by the test - created_timestamp_column="created", -) - -driver = Entity(name="driver_id", description="driver id",) - -# features are inferred from columns of data source -driver_hourly_stats_view = FeatureView( - name="driver_hourly_stats", - entities=["driver_id"], - ttl=Duration(seconds=86400 * 1), - online=True, - input=driver_hourly_stats, - tags={}, -) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_missing_bq_source.py 
b/sdk/python/tests/example_repos/example_feature_repo_with_missing_bq_source.py deleted file mode 100644 index 3d1dc3394b..0000000000 --- a/sdk/python/tests/example_repos/example_feature_repo_with_missing_bq_source.py +++ /dev/null @@ -1,20 +0,0 @@ -from datetime import timedelta - -from feast import BigQuerySource, Entity, Feature, FeatureView, ValueType - -nonexistent_source = BigQuerySource( - table_ref="project.dataset.nonexistent_table", event_timestamp_column="" -) - -driver = Entity(name="driver", value_type=ValueType.INT64, description="driver id",) - -nonexistent_features = FeatureView( - name="driver_locations", - entities=["driver"], - ttl=timedelta(days=1), - features=[ - Feature(name="lat", dtype=ValueType.FLOAT), - Feature(name="lon", dtype=ValueType.STRING), - ], - input=nonexistent_source, -) diff --git a/sdk/python/tests/example_repos/example_repo_duplicate_data_source_names.py b/sdk/python/tests/example_repos/example_repo_duplicate_data_source_names.py new file mode 100644 index 0000000000..5ec08b7182 --- /dev/null +++ b/sdk/python/tests/example_repos/example_repo_duplicate_data_source_names.py @@ -0,0 +1,9 @@ +from feast import FileSource + +driver_hourly_stats = FileSource( + path="driver_stats.parquet", # this parquet is not real and will not be read +) + +driver_hourly_stats_clone = FileSource( + path="driver_stats.parquet", # this parquet is not real and will not be read +) diff --git a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index a5d396a458..1d4ce7d6cb 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -4,20 +4,24 @@ import pandas from tqdm import tqdm -from feast import Entity, FeatureTable, FeatureView, RepoConfig +from feast import Entity, FeatureView, RepoConfig from feast.infra.offline_stores.offline_store import RetrievalJob from feast.infra.provider import Provider from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.registry import Registry +from feast.saved_dataset import SavedDataset class FooProvider(Provider): + def __init__(self, config: RepoConfig): + pass + def update_infra( self, project: str, - tables_to_delete: Sequence[Union[FeatureTable, FeatureView]], - tables_to_keep: Sequence[Union[FeatureTable, FeatureView]], + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], entities_to_delete: Sequence[Entity], entities_to_keep: Sequence[Entity], partial: bool, @@ -25,17 +29,14 @@ def update_infra( pass def teardown_infra( - self, - project: str, - tables: Sequence[Union[FeatureTable, FeatureView]], - entities: Sequence[Entity], + self, project: str, tables: Sequence[FeatureView], entities: Sequence[Entity], ): pass def online_write_batch( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, data: List[ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] ], @@ -70,11 +71,11 @@ def get_historical_features( def online_read( self, config: RepoConfig, - table: Union[FeatureTable, FeatureView], + table: FeatureView, entity_keys: List[EntityKeyProto], requested_features: List[str] = None, ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: pass - def __init__(self, config, repo_path): + def retrieve_saved_dataset(self, config: RepoConfig, dataset: SavedDataset): pass diff --git a/sdk/python/tests/foo_registry_store.py b/sdk/python/tests/foo_registry_store.py new file mode 100644 index 
0000000000..31fb653e9b --- /dev/null +++ b/sdk/python/tests/foo_registry_store.py @@ -0,0 +1,20 @@ +from pathlib import Path + +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry_store import RegistryStore +from feast.repo_config import RegistryConfig + + +class FooRegistryStore(RegistryStore): + def __init__(self, registry_config: RegistryConfig, repo_path: Path) -> None: + super().__init__() + self.registry_proto = RegistryProto() + + def get_registry_proto(self): + return self.registry_proto + + def update_registry_proto(self, registry_proto: RegistryProto): + self.registry_proto = registry_proto + + def teardown(self): + pass diff --git a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py new file mode 100644 index 0000000000..a3048300a3 --- /dev/null +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -0,0 +1,121 @@ +import contextlib +import json +from datetime import datetime +from typing import List + +import pytest +from fastapi.testclient import TestClient + +from feast.feast_object import FeastObject +from feast.feature_server import get_app +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.repo_configuration import ( + construct_test_environment, + construct_universal_feature_views, + construct_universal_test_data, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + + +@pytest.mark.integration +@pytest.mark.universal +def test_get_online_features(): + with setup_python_fs_client() as client: + request_data_dict = { + "features": [ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], + "entities": {"driver_id": [5001, 5002]}, + } + response = client.post( + "/get-online-features", data=json.dumps(request_data_dict) + ) + + # Check entities and features are present + parsed_response = json.loads(response.text) + assert "metadata" in parsed_response + metadata = parsed_response["metadata"] + expected_features = ["driver_id", "conv_rate", "acc_rate", "avg_daily_trips"] + response_feature_names = metadata["feature_names"] + assert len(response_feature_names) == len(expected_features) + for expected_feature in expected_features: + assert expected_feature in response_feature_names + assert "results" in parsed_response + results = parsed_response["results"] + for result in results: + # Same order as in metadata + assert len(result["statuses"]) == 2 # Requested two entities + for status in result["statuses"]: + assert status == "PRESENT" + results_driver_id_index = response_feature_names.index("driver_id") + assert ( + results[results_driver_id_index]["values"] + == request_data_dict["entities"]["driver_id"] + ) + + +@pytest.mark.integration +@pytest.mark.universal +def test_push(): + with setup_python_fs_client() as client: + initial_temp = get_temperatures(client, location_ids=[1])[0] + json_data = json.dumps( + { + "push_source_name": "location_stats_push_source", + "df": { + "location_id": [1], + "temperature": [initial_temp * 100], + "event_timestamp": [str(datetime.utcnow())], + "created": [str(datetime.utcnow())], + }, + } + ) + response = client.post("/push", data=json_data,) + + # Check new pushed temperature is fetched + assert response.status_code == 200 + assert get_temperatures(client, location_ids=[1]) == [initial_temp * 100] + + +def 
get_temperatures(client, location_ids: List[int]): + get_request_data = { + "features": ["pushable_location_stats:temperature"], + "entities": {"location_id": location_ids}, + } + response = client.post("/get-online-features", data=json.dumps(get_request_data)) + parsed_response = json.loads(response.text) + assert "metadata" in parsed_response + metadata = parsed_response["metadata"] + response_feature_names = metadata["feature_names"] + assert "results" in parsed_response + results = parsed_response["results"] + results_temperature_index = response_feature_names.index("temperature") + return results[results_temperature_index]["values"] + + +@contextlib.contextmanager +def setup_python_fs_client(): + config = IntegrationTestRepoConfig() + environment = construct_test_environment(config) + fs = environment.feature_store + try: + entities, datasets, data_sources = construct_universal_test_data(environment) + feature_views = construct_universal_feature_views(data_sources) + feast_objects: List[FeastObject] = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location()]) + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + client = TestClient(get_app(fs)) + yield client + finally: + fs.teardown() + environment.data_source_creator.teardown() diff --git a/sdk/python/tests/integration/e2e/test_universal_e2e.py b/sdk/python/tests/integration/e2e/test_universal_e2e.py new file mode 100644 index 0000000000..957cf9fba6 --- /dev/null +++ b/sdk/python/tests/integration/e2e/test_universal_e2e.py @@ -0,0 +1,159 @@ +import math +from datetime import datetime, timedelta +from typing import Optional + +import pandas as pd +import pytest +from pytz import utc + +from feast import FeatureStore, FeatureView +from tests.integration.feature_repos.universal.entities import driver +from tests.integration.feature_repos.universal.feature_views import driver_feature_view + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("infer_features", [True, False]) +def test_e2e_consistency(environment, e2e_data_sources, infer_features): + fs = environment.feature_store + df, data_source = e2e_data_sources + fv = driver_feature_view( + name=f"test_consistency_{'with_inference' if infer_features else ''}", + data_source=data_source, + infer_features=infer_features, + ) + + entity = driver() + fs.apply([fv, entity]) + + # materialization is run in two steps and + # we use timestamp from generated dataframe as a split point + split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) + + run_offline_online_store_consistency_test(fs, fv, split_dt) + + +def check_offline_and_online_features( + fs: FeatureStore, + fv: FeatureView, + driver_id: int, + event_timestamp: datetime, + expected_value: Optional[float], + full_feature_names: bool, + check_offline_store: bool = True, +) -> None: + # Check online store + response_dict = fs.get_online_features( + [f"{fv.name}:value"], + [{"driver_id": driver_id}], + full_feature_names=full_feature_names, + ).to_dict() + + if full_feature_names: + if expected_value: + assert ( + abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict[f"{fv.name}__value"][0] is None + else: + if expected_value: + assert ( + abs(response_dict["value"][0] - expected_value) < 1e-6 + ), f"Response: {response_dict}, Expected: {expected_value}" + else: + assert response_dict["value"][0] is None + + # 
Check offline store + if check_offline_store: + df = fs.get_historical_features( + entity_df=pd.DataFrame.from_dict( + {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} + ), + features=[f"{fv.name}:value"], + full_feature_names=full_feature_names, + ).to_df() + + if full_feature_names: + if expected_value: + assert ( + abs( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + - expected_value + ) + < 1e-6 + ) + else: + assert not df.to_dict(orient="list")[f"{fv.name}__value"] or math.isnan( + df.to_dict(orient="list")[f"{fv.name}__value"][0] + ) + else: + if expected_value: + assert ( + abs(df.to_dict(orient="list")["value"][0] - expected_value) < 1e-6 + ) + else: + assert not df.to_dict(orient="list")["value"] or math.isnan( + df.to_dict(orient="list")["value"][0] + ) + + +def run_offline_online_store_consistency_test( + fs: FeatureStore, fv: FeatureView, split_dt: datetime +) -> None: + now = datetime.utcnow() + + full_feature_names = True + check_offline_store: bool = True + + # Run materialize() + # use both tz-naive & tz-aware timestamps to test that they're both correctly handled + start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + end_date = split_dt + fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) + + # check result of materialize() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=1, + event_timestamp=end_date, + expected_value=0.3, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=2, + event_timestamp=end_date, + expected_value=None, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # check prior value for materialize_incremental() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=end_date, + expected_value=4, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) + + # run materialize_incremental() + fs.materialize_incremental(feature_views=[fv.name], end_date=now) + + # check result of materialize_incremental() + check_offline_and_online_features( + fs=fs, + fv=fv, + driver_id=3, + event_timestamp=now, + expected_value=5, + full_feature_names=full_feature_names, + check_offline_store=check_offline_store, + ) diff --git a/sdk/python/tests/integration/e2e/test_usage_e2e.py b/sdk/python/tests/integration/e2e/test_usage_e2e.py new file mode 100644 index 0000000000..12c1eb8628 --- /dev/null +++ b/sdk/python/tests/integration/e2e/test_usage_e2e.py @@ -0,0 +1,147 @@ +# Copyright 2020 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
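The usage tests below toggle and capture Feast telemetry by monkeypatching internals of feast.usage. A condensed sketch of the capture pattern (the patch target is the one used by the dummy_exporter fixture in this file; the body is illustrative):

from unittest.mock import patch

events = []
with patch("feast.usage._export", new=events.append):
    # Any instrumented Feast call made here appends its usage payload
    # (a dict carrying an "entrypoint" key, per the assertions below)
    # to the events list instead of exporting it.
    ...

Because the usage decorators are applied at import time, the tests call _reload_feast() to re-import the instrumented modules after flipping feast.usage._is_enabled.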
+import os +import sys +import tempfile +from importlib import reload +from unittest.mock import patch + +import pytest + +from feast import Entity, RepoConfig, ValueType +from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig + + +@pytest.fixture(scope="function") +def dummy_exporter(): + event_log = [] + + with patch("feast.usage._export", new=event_log.append): + yield event_log + + +@pytest.fixture(scope="function") +def enabling_toggle(): + with patch("feast.usage._is_enabled") as p: + p.__bool__.return_value = True + yield p + + # return to initial state + _reload_feast() + + +@pytest.mark.integration +def test_usage_on(dummy_exporter, enabling_toggle): + _reload_feast() + from feast.feature_store import FeatureStore + + with tempfile.TemporaryDirectory() as temp_dir: + test_feature_store = FeatureStore( + config=RepoConfig( + registry=os.path.join(temp_dir, "registry.db"), + project="fake_project", + provider="local", + online_store=SqliteOnlineStoreConfig( + path=os.path.join(temp_dir, "online.db") + ), + ) + ) + entity = Entity( + name="driver_car_id", + description="Car driver id", + value_type=ValueType.STRING, + tags={"team": "matchmaking"}, + ) + + test_feature_store.apply([entity]) + + assert len(dummy_exporter) == 3 + assert { + "entrypoint": "feast.infra.local.LocalRegistryStore.get_registry_proto" + }.items() <= dummy_exporter[0].items() + assert { + "entrypoint": "feast.infra.local.LocalRegistryStore.update_registry_proto" + }.items() <= dummy_exporter[1].items() + assert { + "entrypoint": "feast.feature_store.FeatureStore.apply" + }.items() <= dummy_exporter[2].items() + + +@pytest.mark.integration +def test_usage_off(dummy_exporter, enabling_toggle): + enabling_toggle.__bool__.return_value = False + + _reload_feast() + from feast.feature_store import FeatureStore + + with tempfile.TemporaryDirectory() as temp_dir: + test_feature_store = FeatureStore( + config=RepoConfig( + registry=os.path.join(temp_dir, "registry.db"), + project="fake_project", + provider="local", + online_store=SqliteOnlineStoreConfig( + path=os.path.join(temp_dir, "online.db") + ), + ) + ) + entity = Entity( + name="driver_car_id", + description="Car driver id", + value_type=ValueType.STRING, + tags={"team": "matchmaking"}, + ) + test_feature_store.apply([entity]) + + assert not dummy_exporter + + +@pytest.mark.integration +def test_exception_usage_on(dummy_exporter, enabling_toggle): + _reload_feast() + from feast.feature_store import FeatureStore + + with pytest.raises(OSError): + FeatureStore("/tmp/non_existent_directory") + + assert len(dummy_exporter) == 1 + assert { + "entrypoint": "feast.feature_store.FeatureStore.__init__", + "exception": repr(FileNotFoundError(2, "No such file or directory")), + }.items() <= dummy_exporter[0].items() + + +@pytest.mark.integration +def test_exception_usage_off(dummy_exporter, enabling_toggle): + enabling_toggle.__bool__.return_value = False + + _reload_feast() + from feast.feature_store import FeatureStore + + with pytest.raises(OSError): + FeatureStore("/tmp/non_existent_directory") + + assert not dummy_exporter + + +def _reload_feast(): + """After changing environment need to reload modules and rerun usage decorators""" + modules = ( + "feast.infra.local", + "feast.infra.online_stores.sqlite", + "feast.feature_store", + ) + for mod in modules: + if mod in sys.modules: + reload(sys.modules[mod]) diff --git a/sdk/python/tests/integration/e2e/test_validation.py b/sdk/python/tests/integration/e2e/test_validation.py new file mode 100644 index 
0000000000..76bbe152c5
--- /dev/null
+++ b/sdk/python/tests/integration/e2e/test_validation.py
@@ -0,0 +1,130 @@
+import pandas as pd
+import pytest
+from great_expectations.core import ExpectationSuite
+from great_expectations.dataset import PandasDataset
+
+from feast.dqm.errors import ValidationFailed
+from feast.dqm.profilers.ge_profiler import ge_profiler
+from tests.integration.feature_repos.repo_configuration import (
+    construct_universal_feature_views,
+)
+from tests.integration.feature_repos.universal.entities import (
+    customer,
+    driver,
+    location,
+)
+
+_features = [
+    "customer_profile:current_balance",
+    "customer_profile:avg_passenger_count",
+    "customer_profile:lifetime_trip_count",
+    "order:order_is_success",
+    "global_stats:num_rides",
+    "global_stats:avg_ride_length",
+]
+
+
+@ge_profiler
+def configurable_profiler(dataset: PandasDataset) -> ExpectationSuite:
+    from great_expectations.profile.user_configurable_profiler import (
+        UserConfigurableProfiler,
+    )
+
+    return UserConfigurableProfiler(
+        profile_dataset=dataset,
+        excluded_expectations=[
+            "expect_table_columns_to_match_ordered_list",
+            "expect_table_row_count_to_be_between",
+        ],
+        value_set_threshold="few",
+    ).build_suite()
+
+
+@ge_profiler
+def profiler_with_unrealistic_expectations(dataset: PandasDataset) -> ExpectationSuite:
+    # we need to create a dataframe with corrupted data first
+    df = pd.DataFrame()
+    df["current_balance"] = [-100]
+    df["avg_passenger_count"] = [0]
+
+    other_ds = PandasDataset(df)
+    other_ds.expect_column_max_to_be_between("current_balance", -1000, -100)
+    other_ds.expect_column_values_to_be_in_set("avg_passenger_count", value_set={0})
+
+    # this should pass
+    other_ds.expect_column_min_to_be_between("avg_passenger_count", 0, 1000)
+
+    return other_ds.get_expectation_suite()
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+def test_historical_retrieval_with_validation(environment, universal_data_sources):
+    store = environment.feature_store
+    (entities, datasets, data_sources) = universal_data_sources
+    feature_views = construct_universal_feature_views(data_sources)
+    store.apply([driver(), customer(), location(), *feature_views.values()])
+
+    # Create two identical retrieval jobs
+    entity_df = datasets.entity_df.drop(
+        columns=["order_id", "origin_id", "destination_id"]
+    )
+    reference_job = store.get_historical_features(
+        entity_df=entity_df, features=_features,
+    )
+    job = store.get_historical_features(entity_df=entity_df, features=_features,)
+
+    # Save dataset using reference job and retrieve it
+    store.create_saved_dataset(
+        from_=reference_job,
+        name="my_training_dataset",
+        storage=environment.data_source_creator.create_saved_dataset_destination(),
+    )
+    saved_dataset = store.get_saved_dataset("my_training_dataset")
+
+    # If validation passes, no exception will be raised at this point
+    reference = saved_dataset.as_reference(profiler=configurable_profiler)
+    job.to_df(validation_reference=reference)
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+def test_historical_retrieval_fails_on_validation(environment, universal_data_sources):
+    store = environment.feature_store
+
+    (entities, datasets, data_sources) = universal_data_sources
+    feature_views = construct_universal_feature_views(data_sources)
+
+    store.apply([driver(), customer(), location(), *feature_views.values()])
+
+    entity_df = datasets.entity_df.drop(
+        columns=["order_id", "origin_id", "destination_id"]
+    )
+
+    reference_job = store.get_historical_features(
+        entity_df=entity_df, features=_features,
+    
) + + store.create_saved_dataset( + from_=reference_job, + name="my_other_dataset", + storage=environment.data_source_creator.create_saved_dataset_destination(), + ) + + job = store.get_historical_features(entity_df=entity_df, features=_features,) + + with pytest.raises(ValidationFailed) as exc_info: + job.to_df( + validation_reference=store.get_saved_dataset( + "my_other_dataset" + ).as_reference(profiler=profiler_with_unrealistic_expectations) + ) + + failed_expectations = exc_info.value.report.errors + assert len(failed_expectations) == 2 + + assert failed_expectations[0].check_name == "expect_column_max_to_be_between" + assert failed_expectations[0].column_name == "current_balance" + + assert failed_expectations[1].check_name == "expect_column_values_to_be_in_set" + assert failed_expectations[1].column_name == "avg_passenger_count" diff --git a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py new file mode 100644 index 0000000000..f8cd66a619 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py @@ -0,0 +1,52 @@ +from dataclasses import dataclass +from typing import Dict, Optional, Type, Union + +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.file import ( + FileDataSourceCreator, +) +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +@dataclass(frozen=False) +class IntegrationTestRepoConfig: + """ + This class should hold all possible parameters that may need to be varied by individual tests. + """ + + provider: str = "local" + online_store: Union[str, Dict] = "sqlite" + + offline_store_creator: Type[DataSourceCreator] = FileDataSourceCreator + online_store_creator: Optional[Type[OnlineStoreCreator]] = None + + full_feature_names: bool = True + infer_features: bool = False + python_feature_server: bool = False + go_feature_retrieval: bool = False + + def __repr__(self) -> str: + if not self.online_store_creator: + if isinstance(self.online_store, str): + online_store_type = self.online_store + elif isinstance(self.online_store, dict): + if self.online_store["type"] == "redis": + online_store_type = self.online_store.get("redis_type", "redis") + else: + online_store_type = self.online_store["type"] + else: + online_store_type = self.online_store.__name__ + else: + online_store_type = self.online_store_creator.__name__ + + return ":".join( + [ + f"{self.provider.upper()}", + f"{self.offline_store_creator.__name__.split('.')[-1].replace('DataSourceCreator', '')}", + online_store_type, + ] + ) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py new file mode 100644 index 0000000000..9902f7c7b8 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -0,0 +1,463 @@ +import dataclasses +import importlib +import json +import os +import re +import tempfile +import uuid +from dataclasses import dataclass +from datetime import datetime, timedelta +from pathlib import Path +from typing import Any, List, Optional, Tuple, Union + +import pandas as pd +import yaml +from testcontainers.core.container import DockerContainer + +from feast import FeatureStore, FeatureView, OnDemandFeatureView, driver_test_data +from feast.constants import FULL_REPO_CONFIGS_MODULE_ENV_NAME 
+from feast.data_source import DataSource
+from feast.errors import FeastModuleImportError
+from feast.repo_config import RegistryConfig, RepoConfig
+from tests.integration.feature_repos.integration_test_repo_config import (
+    IntegrationTestRepoConfig,
+)
+from tests.integration.feature_repos.universal.data_source_creator import (
+    DataSourceCreator,
+)
+from tests.integration.feature_repos.universal.data_sources.bigquery import (
+    BigQueryDataSourceCreator,
+)
+from tests.integration.feature_repos.universal.data_sources.redshift import (
+    RedshiftDataSourceCreator,
+)
+from tests.integration.feature_repos.universal.data_sources.snowflake import (
+    SnowflakeDataSourceCreator,
+)
+from tests.integration.feature_repos.universal.feature_views import (
+    conv_rate_plus_100_feature_view,
+    create_conv_rate_request_source,
+    create_customer_daily_profile_feature_view,
+    create_driver_hourly_stats_feature_view,
+    create_field_mapping_feature_view,
+    create_global_stats_feature_view,
+    create_location_stats_feature_view,
+    create_order_feature_view,
+    create_pushable_feature_view,
+)
+from tests.integration.feature_repos.universal.online_store.datastore import (
+    DatastoreOnlineStoreCreator,
+)
+from tests.integration.feature_repos.universal.online_store.dynamodb import (
+    DynamoDBOnlineStoreCreator,
+)
+from tests.integration.feature_repos.universal.online_store.redis import (
+    RedisOnlineStoreCreator,
+)
+from tests.integration.feature_repos.universal.online_store_creator import (
+    OnlineStoreCreator,
+)
+
+DYNAMO_CONFIG = {"type": "dynamodb", "region": "us-west-2"}
+# Port 12345 will be chosen as the default for the Redis node configuration because Redis Cluster is started off of nodes
+# 6379 -> 6384. This causes conflicts in CLI integration tests, so we manually keep them separate.
+REDIS_CONFIG = {"type": "redis", "connection_string": "localhost:6379,db=0"}
+REDIS_CLUSTER_CONFIG = {
+    "type": "redis",
+    "redis_type": "redis_cluster",
+    # Redis Cluster Port Forwarding is setup in "pr_integration_tests.yaml" under "Setup Redis Cluster".
+    "connection_string": "127.0.0.1:6001,127.0.0.1:6002,127.0.0.1:6003",
+}
+
+# FULL_REPO_CONFIGS contains the repo configurations (e.g. provider, offline store,
+# online store, test data, and more parameters) that most integration tests will test
+# against. By default, FULL_REPO_CONFIGS uses the three providers (local, GCP, and AWS)
+# with their default offline and online stores; it also tests the providers with the
+# Redis online store. It can be overridden by specifying a Python module through the
+# FULL_REPO_CONFIGS_MODULE_ENV_NAME environment variable. In this case, that Python
+# module will be imported and FULL_REPO_CONFIGS will be extracted from that module.
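+#
+# For illustration only (the module name here is hypothetical), an override
+# module reachable on the PYTHONPATH, e.g. my_repo_configs.py, just needs to
+# define that attribute:
+#
+#     from tests.integration.feature_repos.integration_test_repo_config import (
+#         IntegrationTestRepoConfig,
+#     )
+#
+#     FULL_REPO_CONFIGS = [
+#         IntegrationTestRepoConfig(),  # local provider with sqlite online store
+#         IntegrationTestRepoConfig(
+#             online_store={"type": "redis", "connection_string": "localhost:6379,db=0"}
+#         ),
+#     ]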
+DEFAULT_FULL_REPO_CONFIGS: List[IntegrationTestRepoConfig] = [ + # Local configurations + IntegrationTestRepoConfig(), + IntegrationTestRepoConfig(python_feature_server=True), +] +if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": + DEFAULT_FULL_REPO_CONFIGS.extend( + [ + IntegrationTestRepoConfig(online_store=REDIS_CONFIG), + # GCP configurations + IntegrationTestRepoConfig( + provider="gcp", + offline_store_creator=BigQueryDataSourceCreator, + online_store="datastore", + ), + IntegrationTestRepoConfig( + provider="gcp", + offline_store_creator=BigQueryDataSourceCreator, + online_store=REDIS_CONFIG, + ), + # AWS configurations + IntegrationTestRepoConfig( + provider="aws", + offline_store_creator=RedshiftDataSourceCreator, + online_store=DYNAMO_CONFIG, + python_feature_server=True, + ), + IntegrationTestRepoConfig( + provider="aws", + offline_store_creator=RedshiftDataSourceCreator, + online_store=REDIS_CONFIG, + ), + # Snowflake configurations + IntegrationTestRepoConfig( + provider="aws", # no list features, no feature server + offline_store_creator=SnowflakeDataSourceCreator, + online_store=REDIS_CONFIG, + ), + # Go implementation for online retrieval + IntegrationTestRepoConfig( + online_store=REDIS_CONFIG, go_feature_retrieval=True, + ), + # TODO(felixwang9817): Enable this test once https://github.com/feast-dev/feast/issues/2544 is resolved. + # IntegrationTestRepoConfig( + # online_store=REDIS_CONFIG, + # python_feature_server=True, + # go_feature_retrieval=True, + # ), + ] + ) +if os.getenv("FEAST_GO_FEATURE_RETRIEVAL", "False") == "True": + DEFAULT_FULL_REPO_CONFIGS = [ + IntegrationTestRepoConfig( + online_store=REDIS_CONFIG, go_feature_retrieval=True, + ), + ] +full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) +if full_repo_configs_module is not None: + try: + module = importlib.import_module(full_repo_configs_module) + FULL_REPO_CONFIGS = getattr(module, "FULL_REPO_CONFIGS") + except Exception as e: + raise FeastModuleImportError( + "FULL_REPO_CONFIGS", full_repo_configs_module + ) from e +else: + FULL_REPO_CONFIGS = DEFAULT_FULL_REPO_CONFIGS + +if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": + replacements = {"datastore": DatastoreOnlineStoreCreator} + replacement_dicts = [ + (REDIS_CONFIG, RedisOnlineStoreCreator), + (DYNAMO_CONFIG, DynamoDBOnlineStoreCreator), + ] + for c in FULL_REPO_CONFIGS: + if isinstance(c.online_store, dict): + for _replacement in replacement_dicts: + if c.online_store == _replacement[0]: + c.online_store_creator = _replacement[1] + elif c.online_store in replacements: + c.online_store_creator = replacements[c.online_store] + + +@dataclass +class UniversalEntities: + customer_vals: List[Any] + driver_vals: List[Any] + location_vals: List[Any] + + +def construct_universal_entities() -> UniversalEntities: + return UniversalEntities( + customer_vals=list(range(1001, 1020)), + driver_vals=list(range(5001, 5020)), + location_vals=list(range(1, 50)), + ) + + +@dataclass +class UniversalDatasets: + customer_df: pd.DataFrame + driver_df: pd.DataFrame + location_df: pd.DataFrame + orders_df: pd.DataFrame + global_df: pd.DataFrame + field_mapping_df: pd.DataFrame + entity_df: pd.DataFrame + + +def construct_universal_datasets( + entities: UniversalEntities, start_time: datetime, end_time: datetime +) -> UniversalDatasets: + customer_df = driver_test_data.create_customer_daily_profile_df( + entities.customer_vals, start_time, end_time + ) + driver_df = driver_test_data.create_driver_hourly_stats_df( + 
entities.driver_vals, start_time, end_time + ) + location_df = driver_test_data.create_location_stats_df( + entities.location_vals, start_time, end_time + ) + orders_df = driver_test_data.create_orders_df( + customers=entities.customer_vals, + drivers=entities.driver_vals, + locations=entities.location_vals, + start_date=start_time, + end_date=end_time, + order_count=20, + ) + global_df = driver_test_data.create_global_daily_stats_df(start_time, end_time) + field_mapping_df = driver_test_data.create_field_mapping_df(start_time, end_time) + entity_df = orders_df[ + [ + "customer_id", + "driver_id", + "order_id", + "origin_id", + "destination_id", + "event_timestamp", + ] + ] + + return UniversalDatasets( + customer_df=customer_df, + driver_df=driver_df, + location_df=location_df, + orders_df=orders_df, + global_df=global_df, + field_mapping_df=field_mapping_df, + entity_df=entity_df, + ) + + +@dataclass +class UniversalDataSources: + customer: DataSource + driver: DataSource + location: DataSource + orders: DataSource + global_ds: DataSource + field_mapping: DataSource + + def values(self): + return dataclasses.asdict(self).values() + + +def construct_universal_data_sources( + datasets: UniversalDatasets, data_source_creator: DataSourceCreator +) -> UniversalDataSources: + customer_ds = data_source_creator.create_data_source( + datasets.customer_df, + destination_name="customer_profile", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + driver_ds = data_source_creator.create_data_source( + datasets.driver_df, + destination_name="driver_hourly", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + location_ds = data_source_creator.create_data_source( + datasets.location_df, + destination_name="location_hourly", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + orders_ds = data_source_creator.create_data_source( + datasets.orders_df, + destination_name="orders", + timestamp_field="event_timestamp", + created_timestamp_column=None, + ) + global_ds = data_source_creator.create_data_source( + datasets.global_df, + destination_name="global", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + field_mapping_ds = data_source_creator.create_data_source( + datasets.field_mapping_df, + destination_name="field_mapping", + timestamp_field="event_timestamp", + created_timestamp_column="created", + field_mapping={"column_name": "feature_name"}, + ) + return UniversalDataSources( + customer=customer_ds, + driver=driver_ds, + location=location_ds, + orders=orders_ds, + global_ds=global_ds, + field_mapping=field_mapping_ds, + ) + + +@dataclass +class UniversalFeatureViews: + customer: FeatureView + global_fv: FeatureView + driver: FeatureView + driver_odfv: OnDemandFeatureView + order: FeatureView + location: FeatureView + field_mapping: FeatureView + pushed_locations: FeatureView + + def values(self): + return dataclasses.asdict(self).values() + + +def construct_universal_feature_views( + data_sources: UniversalDataSources, with_odfv: bool = True, +) -> UniversalFeatureViews: + driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) + return UniversalFeatureViews( + customer=create_customer_daily_profile_feature_view(data_sources.customer), + global_fv=create_global_stats_feature_view(data_sources.global_ds), + driver=driver_hourly_stats, + driver_odfv=conv_rate_plus_100_feature_view( + { + "driver": driver_hourly_stats, + "input_request": 
create_conv_rate_request_source(), + } + ) + if with_odfv + else None, + order=create_order_feature_view(data_sources.orders), + location=create_location_stats_feature_view(data_sources.location), + field_mapping=create_field_mapping_feature_view(data_sources.field_mapping), + pushed_locations=create_pushable_feature_view(data_sources.location), + ) + + +@dataclass +class Environment: + name: str + test_repo_config: IntegrationTestRepoConfig + feature_store: FeatureStore + data_source_creator: DataSourceCreator + python_feature_server: bool + worker_id: str + online_store_creator: Optional[OnlineStoreCreator] = None + + def __post_init__(self): + self.end_date = datetime.utcnow().replace(microsecond=0, second=0, minute=0) + self.start_date: datetime = self.end_date - timedelta(days=3) + + def get_feature_server_endpoint(self) -> str: + if self.python_feature_server and self.test_repo_config.provider == "local": + return f"http://localhost:{self.get_local_server_port()}" + return self.feature_store.get_feature_server_endpoint() + + def get_local_server_port(self) -> int: + # Heuristic when running with xdist to extract unique ports for each worker + parsed_worker_id = re.findall("gw(\\d+)", self.worker_id) + if len(parsed_worker_id) != 0: + worker_id_num = int(parsed_worker_id[0]) + else: + worker_id_num = 0 + return 6566 + worker_id_num + + +def table_name_from_data_source(ds: DataSource) -> Optional[str]: + if hasattr(ds, "table_ref"): + return ds.table_ref # type: ignore + elif hasattr(ds, "table"): + return ds.table # type: ignore + return None + + +def construct_test_environment( + test_repo_config: IntegrationTestRepoConfig, + test_suite_name: str = "integration_test", + worker_id: str = "worker_id", + offline_container: Optional[DockerContainer] = None, +) -> Environment: + _uuid = str(uuid.uuid4()).replace("-", "")[:6] + + run_id = os.getenv("GITHUB_RUN_ID", default=None) + run_id = f"gh_run_{run_id}_{_uuid}" if run_id else _uuid + run_num = os.getenv("GITHUB_RUN_NUMBER", default=1) + + project = f"{test_suite_name}_{run_id}_{run_num}" + + offline_creator: DataSourceCreator = test_repo_config.offline_store_creator( + project, offline_container=offline_container + ) + offline_store_config = offline_creator.create_offline_store_config() + + if test_repo_config.online_store_creator: + online_creator = test_repo_config.online_store_creator(project) + online_store = ( + test_repo_config.online_store + ) = online_creator.create_online_store() + else: + online_creator = None + online_store = test_repo_config.online_store + + repo_dir_name = tempfile.mkdtemp() + + if test_repo_config.python_feature_server and test_repo_config.provider == "aws": + from feast.infra.feature_servers.aws_lambda.config import ( + AwsLambdaFeatureServerConfig, + ) + + feature_server = AwsLambdaFeatureServerConfig( + enabled=True, + execution_role_name="arn:aws:iam::402087665549:role/lambda_execution_role", + ) + + registry = ( + f"s3://feast-integration-tests/registries/{project}/registry.db" + ) # type: Union[str, RegistryConfig] + else: + # Note: even if it's a local feature server, the repo config does not have this configured + feature_server = None + registry = RegistryConfig( + path=str(Path(repo_dir_name) / "registry.db"), cache_ttl_seconds=1, + ) + config = RepoConfig( + registry=registry, + project=project, + provider=test_repo_config.provider, + offline_store=offline_store_config, + online_store=online_store, + repo_path=repo_dir_name, + feature_server=feature_server, + 
go_feature_retrieval=test_repo_config.go_feature_retrieval, + ) + + # Create feature_store.yaml out of the config + with open(Path(repo_dir_name) / "feature_store.yaml", "w") as f: + yaml.safe_dump(json.loads(config.json()), f) + + fs = FeatureStore(repo_dir_name) + # We need to initialize the registry, because if nothing is applied in the test before tearing down + # the feature store, that will cause the teardown method to blow up. + fs.registry._initialize_registry() + environment = Environment( + name=project, + test_repo_config=test_repo_config, + feature_store=fs, + data_source_creator=offline_creator, + python_feature_server=test_repo_config.python_feature_server, + worker_id=worker_id, + online_store_creator=online_creator, + ) + + return environment + + +TestData = Tuple[UniversalEntities, UniversalDatasets, UniversalDataSources] + + +def construct_universal_test_data(environment: Environment) -> TestData: + entities = construct_universal_entities() + datasets = construct_universal_datasets( + entities, environment.start_date, environment.end_date + ) + data_sources = construct_universal_data_sources( + datasets, environment.data_source_creator + ) + + return entities, datasets, data_sources diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py new file mode 100644 index 0000000000..ba36f8e89b --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -0,0 +1,56 @@ +from abc import ABC, abstractmethod +from typing import Dict, Optional + +import pandas as pd + +from feast.data_source import DataSource +from feast.repo_config import FeastConfigBaseModel +from feast.saved_dataset import SavedDatasetStorage + + +class DataSourceCreator(ABC): + def __init__(self, project_name: str, *args, **kwargs): + self.project_name = project_name + + @abstractmethod + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + event_timestamp_column="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + timestamp_field: Optional[str] = None, + ) -> DataSource: + """ + Create a data source based on the dataframe. Implementing this method requires the underlying implementation to + persist the dataframe in offline store, using the destination string as a way to differentiate multiple + dataframes and data sources. + + Args: + df: The dataframe to be used to create the data source. + destination_name: This str is used by the implementing classes to + isolate the multiple dataframes from each other. + event_timestamp_column: (Deprecated) Pass through for the underlying data source. + created_timestamp_column: Pass through for the underlying data source. + field_mapping: Pass through for the underlying data source. + timestamp_field: Pass through for the underlying data source. + + + Returns: + A Data source object, pointing to a table or file that is uploaded/persisted for the purpose of the + test. + """ + ... + + @abstractmethod + def create_offline_store_config(self) -> FeastConfigBaseModel: + ... + + @abstractmethod + def create_saved_dataset_destination(self) -> SavedDatasetStorage: + ... + + @abstractmethod + def teardown(self): + ... 
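
For orientation: a DataSourceCreator implementation only has to persist the test dataframe somewhere its offline store can read it, and hand back matching config objects. The FileDataSourceCreator later in this patch is the real file-based implementation; what follows is just a minimal sketch of the required shape (hypothetical class name, using the same Feast APIs exercised elsewhere in this patch), not part of the patch itself:

import tempfile
from typing import Dict, Optional

import pandas as pd

from feast import FileSource
from feast.data_format import ParquetFormat
from feast.data_source import DataSource
from feast.infra.offline_stores.file import FileOfflineStoreConfig
from feast.infra.offline_stores.file_source import SavedDatasetFileStorage
from feast.repo_config import FeastConfigBaseModel
from feast.saved_dataset import SavedDatasetStorage
from tests.integration.feature_repos.universal.data_source_creator import (
    DataSourceCreator,
)


class MinimalParquetDataSourceCreator(DataSourceCreator):
    """Hypothetical minimal creator: one throwaway parquet file per data source."""

    def create_data_source(
        self,
        df: pd.DataFrame,
        destination_name: str,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
        timestamp_field: Optional[str] = None,
    ) -> DataSource:
        # Persist the dataframe so the file offline store can read it back.
        f = tempfile.NamedTemporaryFile(
            prefix=f"{self.project_name}_{destination_name}",
            suffix=".parquet",
            delete=False,
        )
        df.to_parquet(f.name)
        return FileSource(
            file_format=ParquetFormat(),
            path=f.name,
            timestamp_field=timestamp_field or event_timestamp_column,
            created_timestamp_column=created_timestamp_column,
            field_mapping=field_mapping or {"ts_1": "ts"},
        )

    def create_offline_store_config(self) -> FeastConfigBaseModel:
        return FileOfflineStoreConfig()

    def create_saved_dataset_destination(self) -> SavedDatasetStorage:
        return SavedDatasetFileStorage(
            path=tempfile.mkdtemp(),
            file_format=ParquetFormat(),
            s3_endpoint_override=None,
        )

    def teardown(self):
        # The sketch leaves temp files for the OS to clean up.
        pass
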
diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py new file mode 100644 index 0000000000..881f547617 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -0,0 +1,90 @@ +import uuid +from typing import Dict, List, Optional + +import pandas as pd +from google.cloud import bigquery +from google.cloud.bigquery import Dataset + +from feast import BigQuerySource +from feast.data_source import DataSource +from feast.infra.offline_stores.bigquery import BigQueryOfflineStoreConfig +from feast.infra.offline_stores.bigquery_source import SavedDatasetBigQueryStorage +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) + + +class BigQueryDataSourceCreator(DataSourceCreator): + dataset: Optional[Dataset] = None + + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + self.client = bigquery.Client() + self.gcp_project = self.client.project + self.dataset_id = f"{self.gcp_project}.{project_name}" + + self.tables: List[str] = [] + + def create_dataset(self): + if not self.dataset: + self.dataset = bigquery.Dataset(self.dataset_id) + print(f"Creating dataset: {self.dataset_id}") + self.client.create_dataset(self.dataset, exists_ok=True) + self.dataset.default_table_expiration_ms = ( + 1000 * 60 * 60 * 24 * 14 + ) # 2 weeks in milliseconds + self.client.update_dataset(self.dataset, ["default_table_expiration_ms"]) + + def teardown(self): + + for table in self.tables: + self.client.delete_table(table, not_found_ok=True) + + self.client.delete_dataset( + self.dataset_id, delete_contents=True, not_found_ok=True + ) + print(f"Deleted dataset '{self.dataset_id}'") + self.dataset = None + + def create_offline_store_config(self): + return BigQueryOfflineStoreConfig() + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + **kwargs, + ) -> DataSource: + + destination_name = self.get_prefixed_table_name(destination_name) + + self.create_dataset() + + if self.gcp_project not in destination_name: + destination_name = ( + f"{self.gcp_project}.{self.project_name}.{destination_name}" + ) + + job = self.client.load_table_from_dataframe(df, destination_name) + job.result() + + self.tables.append(destination_name) + + return BigQuerySource( + table=destination_name, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + ) + + def create_saved_dataset_destination(self) -> SavedDatasetBigQueryStorage: + table = self.get_prefixed_table_name( + f"persisted_{str(uuid.uuid4()).replace('-', '_')}" + ) + return SavedDatasetBigQueryStorage(table=table) + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{self.client.project}.{self.project_name}.{suffix}" diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/catalog/memory.properties b/sdk/python/tests/integration/feature_repos/universal/data_sources/catalog/memory.properties new file mode 100644 index 0000000000..6a291def3c --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/catalog/memory.properties @@ -0,0 +1,2 @@ +connector.name=memory +memory.max-data-per-node=128MB \ No newline at end of file diff --git 
a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py new file mode 100644 index 0000000000..64c3aeacf3 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -0,0 +1,154 @@ +import tempfile +import uuid +from typing import Any, Dict, List, Optional + +import pandas as pd +from minio import Minio +from testcontainers.core.generic import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from feast import FileSource +from feast.data_format import ParquetFormat +from feast.data_source import DataSource +from feast.infra.offline_stores.file import FileOfflineStoreConfig +from feast.infra.offline_stores.file_source import SavedDatasetFileStorage +from feast.repo_config import FeastConfigBaseModel +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) + + +class FileDataSourceCreator(DataSourceCreator): + files: List[Any] + + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + self.files = [] + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + ) -> DataSource: + + destination_name = self.get_prefixed_table_name(destination_name) + + f = tempfile.NamedTemporaryFile( + prefix=f"{self.project_name}_{destination_name}", + suffix=".parquet", + delete=False, + ) + df.to_parquet(f.name) + self.files.append(f) + return FileSource( + file_format=ParquetFormat(), + path=f"{f.name}", + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + ) + + def create_saved_dataset_destination(self) -> SavedDatasetFileStorage: + d = tempfile.mkdtemp(prefix=self.project_name) + return SavedDatasetFileStorage( + path=d, file_format=ParquetFormat(), s3_endpoint_override=None + ) + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{self.project_name}.{suffix}" + + def create_offline_store_config(self) -> FeastConfigBaseModel: + return FileOfflineStoreConfig() + + def teardown(self): + for f in self.files: + f.close() + + +class S3FileDataSourceCreator(DataSourceCreator): + f: Any + minio: DockerContainer + bucket = "feast-test" + access_key = "AKIAIOSFODNN7EXAMPLE" + secret = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" + minio_image = "minio/minio:RELEASE.2021-08-17T20-53-08Z" + + def __init__(self, _: str): + self._setup_minio() + + def _setup_minio(self): + self.minio = DockerContainer(self.minio_image) + self.minio.with_exposed_ports(9000).with_exposed_ports(9001).with_env( + "MINIO_ROOT_USER", self.access_key + ).with_env("MINIO_ROOT_PASSWORD", self.secret).with_command( + 'server /data --console-address ":9001"' + ) + self.minio.start() + log_string_to_wait_for = ( + "API" # The minio container will print "API: ..." when ready. 
+ ) + wait_for_logs(container=self.minio, predicate=log_string_to_wait_for, timeout=5) + + def _upload_parquet_file(self, df, file_name, minio_endpoint): + self.f = tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) + df.to_parquet(self.f.name) + + client = Minio( + minio_endpoint, + access_key=self.access_key, + secret_key=self.secret, + secure=False, + ) + if not client.bucket_exists(self.bucket): + client.make_bucket(self.bucket) + client.fput_object( + self.bucket, file_name, self.f.name, + ) + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: Optional[str] = None, + suffix: Optional[str] = None, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + ) -> DataSource: + filename = f"{destination_name}.parquet" + port = self.minio.get_exposed_port("9000") + host = self.minio.get_container_host_ip() + minio_endpoint = f"{host}:{port}" + + self._upload_parquet_file(df, filename, minio_endpoint) + + return FileSource( + file_format=ParquetFormat(), + path=f"s3://{self.bucket}/{filename}", + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + s3_endpoint_override=f"http://{host}:{port}", + ) + + def create_saved_dataset_destination(self) -> SavedDatasetFileStorage: + port = self.minio.get_exposed_port("9000") + host = self.minio.get_container_host_ip() + + return SavedDatasetFileStorage( + path=f"s3://{self.bucket}/persisted/{str(uuid.uuid4())}", + file_format=ParquetFormat(), + s3_endpoint_override=f"http://{host}:{port}", + ) + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{suffix}" + + def create_offline_store_config(self) -> FeastConfigBaseModel: + return FileOfflineStoreConfig() + + def teardown(self): + self.minio.stop() + self.f.close() diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py new file mode 100644 index 0000000000..7e305fee80 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -0,0 +1,91 @@ +import uuid +from typing import Dict, List, Optional + +import pandas as pd + +from feast import RedshiftSource +from feast.data_source import DataSource +from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig +from feast.infra.offline_stores.redshift_source import SavedDatasetRedshiftStorage +from feast.infra.utils import aws_utils +from feast.repo_config import FeastConfigBaseModel +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) + + +class RedshiftDataSourceCreator(DataSourceCreator): + + tables: List[str] = [] + + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + self.client = aws_utils.get_redshift_data_client("us-west-2") + self.s3 = aws_utils.get_s3_resource("us-west-2") + + self.offline_store_config = RedshiftOfflineStoreConfig( + cluster_id="feast-integration-tests", + region="us-west-2", + user="admin", + database="feast", + s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion", + iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role", + ) + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + suffix: Optional[str] = None, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + ) -> DataSource: + + 
destination_name = self.get_prefixed_table_name(destination_name) + + aws_utils.upload_df_to_redshift( + self.client, + self.offline_store_config.cluster_id, + self.offline_store_config.database, + self.offline_store_config.user, + self.s3, + f"{self.offline_store_config.s3_staging_location}/copy/{destination_name}.parquet", + self.offline_store_config.iam_role, + destination_name, + df, + ) + + self.tables.append(destination_name) + + return RedshiftSource( + table=destination_name, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + database=self.offline_store_config.database, + ) + + def create_saved_dataset_destination(self) -> SavedDatasetRedshiftStorage: + table = self.get_prefixed_table_name( + f"persisted_ds_{str(uuid.uuid4()).replace('-', '_')}" + ) + self.tables.append(table) + + return SavedDatasetRedshiftStorage(table_ref=table) + + def create_offline_store_config(self) -> FeastConfigBaseModel: + return self.offline_store_config + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{self.project_name}_{suffix}" + + def teardown(self): + for table in self.tables: + aws_utils.execute_redshift_statement( + self.client, + self.offline_store_config.cluster_id, + self.offline_store_config.database, + self.offline_store_config.user, + f"DROP TABLE IF EXISTS {table}", + ) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py new file mode 100644 index 0000000000..3942444f32 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -0,0 +1,81 @@ +import os +import uuid +from typing import Dict, List, Optional + +import pandas as pd + +from feast import SnowflakeSource +from feast.data_source import DataSource +from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig +from feast.infra.offline_stores.snowflake_source import SavedDatasetSnowflakeStorage +from feast.infra.utils.snowflake_utils import get_snowflake_conn, write_pandas +from feast.repo_config import FeastConfigBaseModel +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) + + +class SnowflakeDataSourceCreator(DataSourceCreator): + + tables: List[str] = [] + + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + self.offline_store_config = SnowflakeOfflineStoreConfig( + type="snowflake.offline", + account=os.environ["SNOWFLAKE_CI_DEPLOYMENT"], + user=os.environ["SNOWFLAKE_CI_USER"], + password=os.environ["SNOWFLAKE_CI_PASSWORD"], + role=os.environ["SNOWFLAKE_CI_ROLE"], + warehouse=os.environ["SNOWFLAKE_CI_WAREHOUSE"], + database="FEAST", + schema="OFFLINE", + ) + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + suffix: Optional[str] = None, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + ) -> DataSource: + + snowflake_conn = get_snowflake_conn(self.offline_store_config) + + destination_name = self.get_prefixed_table_name(destination_name) + + write_pandas(snowflake_conn, df, destination_name, auto_create_table=True) + + self.tables.append(destination_name) + + return SnowflakeSource( + table=destination_name, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + 
warehouse=self.offline_store_config.warehouse, + ) + + def create_saved_dataset_destination(self) -> SavedDatasetSnowflakeStorage: + table = self.get_prefixed_table_name( + f"persisted_ds_{str(uuid.uuid4()).replace('-', '_')}" + ) + self.tables.append(table) + + return SavedDatasetSnowflakeStorage(table_ref=table) + + def create_offline_store_config(self) -> FeastConfigBaseModel: + return self.offline_store_config + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{self.project_name}_{suffix}" + + def teardown(self): + snowflake_conn = get_snowflake_conn(self.offline_store_config) + + with snowflake_conn as conn: + cur = conn.cursor() + for table in self.tables: + cur.execute(f'DROP TABLE IF EXISTS "{table}"') diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/spark_data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/spark_data_source_creator.py new file mode 100644 index 0000000000..65cdde9457 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/spark_data_source_creator.py @@ -0,0 +1,107 @@ +import uuid +from typing import Dict, List + +import pandas as pd +from pyspark import SparkConf +from pyspark.sql import SparkSession + +from feast.data_source import DataSource +from feast.infra.offline_stores.contrib.spark_offline_store.spark import ( + SparkOfflineStoreConfig, +) +from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( + SavedDatasetSparkStorage, + SparkSource, +) +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) + + +class SparkDataSourceCreator(DataSourceCreator): + tables: List[str] = [] + spark_offline_store_config = None + spark_session = None + + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + self.spark_conf = { + "master": "local[*]", + "spark.ui.enabled": "false", + "spark.eventLog.enabled": "false", + "spark.sql.parser.quotedRegexColumnNames": "true", + "spark.sql.session.timeZone": "UTC", + } + if not self.spark_offline_store_config: + self.create_offline_store_config() + if not self.spark_session: + self.spark_session = ( + SparkSession.builder.config( + conf=SparkConf().setAll( + [(k, v) for k, v in self.spark_conf.items()] + ) + ) + .appName("pytest-pyspark-local-testing") + .getOrCreate() + ) + self.tables: List[str] = [] + + def teardown(self): + self.spark_session.stop() + + def create_offline_store_config(self): + self.spark_offline_store_config = SparkOfflineStoreConfig() + self.spark_offline_store_config.type = "spark" + self.spark_offline_store_config.spark_conf = self.spark_conf + return self.spark_offline_store_config + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, str] = None, + **kwargs, + ) -> DataSource: + if timestamp_field in df: + df[timestamp_field] = pd.to_datetime(df[timestamp_field], utc=True) + # Make sure the field mapping is correct and convert the datetime datasources. 
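+        # (field_mapping maps source column -> feature name, so it is inverted
+        # below to find the source column that feeds timestamp_field; that
+        # column is then coerced to tz-aware UTC datetimes for Spark.)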
+        if field_mapping:
+            timestamp_mapping = {value: key for key, value in field_mapping.items()}
+            if (
+                timestamp_field in timestamp_mapping
+                and timestamp_mapping[timestamp_field] in df
+            ):
+                col = timestamp_mapping[timestamp_field]
+                df[col] = pd.to_datetime(df[col], utc=True)
+        destination_name = self.get_prefixed_table_name(destination_name)
+        if not self.spark_session:
+            self.spark_session = (
+                SparkSession.builder.config(
+                    conf=SparkConf().setAll(
+                        [(k, v) for k, v in self.spark_conf.items()]
+                    )
+                )
+                .appName("pytest-pyspark-local-testing")
+                .getOrCreate()
+            )
+        self.spark_session.createDataFrame(df).createOrReplaceTempView(destination_name)
+        self.tables.append(destination_name)
+
+        return SparkSource(
+            table=destination_name,
+            timestamp_field=timestamp_field,
+            created_timestamp_column=created_timestamp_column,
+            date_partition_column="",
+            field_mapping=field_mapping or {"ts_1": "ts"},
+        )
+
+    def create_saved_dataset_destination(self) -> SavedDatasetSparkStorage:
+        table = f"persisted_{str(uuid.uuid4()).replace('-', '_')}"
+        return SavedDatasetSparkStorage(
+            table=table, query=None, path=None, file_format=None
+        )
+
+    def get_prefixed_table_name(self, suffix: str) -> str:
+        return f"{self.project_name}_{suffix}"
diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py
new file mode 100644
index 0000000000..ddcfafd31a
--- /dev/null
+++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py
@@ -0,0 +1,119 @@
+import pathlib
+import uuid
+from typing import Dict, List, Optional
+
+import pandas as pd
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.waiting_utils import wait_for_logs
+
+from feast.data_source import DataSource
+from feast.infra.offline_stores.contrib.trino_offline_store.connectors.upload import (
+    upload_pandas_dataframe_to_trino,
+)
+from feast.infra.offline_stores.contrib.trino_offline_store.trino import (
+    TrinoOfflineStoreConfig,
+)
+from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino
+from feast.infra.offline_stores.contrib.trino_offline_store.trino_source import (
+    SavedDatasetTrinoStorage,
+    TrinoSource,
+)
+from feast.repo_config import FeastConfigBaseModel
+from tests.integration.feature_repos.universal.data_source_creator import (
+    DataSourceCreator,
+)
+
+
+class TrinoSourceCreator(DataSourceCreator):
+
+    tables: List[str] = []
+
+    def __init__(self, project_name: str, **kwargs):
+        super().__init__(project_name)
+        self.tables_created: List[str] = []
+
+        if "offline_container" not in kwargs or not kwargs.get(
+            "offline_container", None
+        ):
+            # If we don't get an offline container provided, we try to create it on the fly.
+            # The problem here is that each test creates its own container, which basically
+            # browns out developer laptops.
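+            # When a shared container is passed in instead (e.g. from a
+            # session-scoped fixture), the else branch below sets
+            # provided_container = True so that teardown() leaves it running.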
+            current_file = pathlib.Path(__file__).parent.resolve()
+            catalog_dir = current_file.parent.joinpath("catalog")
+            self.container = (
+                DockerContainer("trinodb/trino:376")
+                .with_volume_mapping(catalog_dir, "/etc/catalog/")
+                .with_exposed_ports("8080")
+            )
+
+            self.container.start()
+            self.provided_container = False
+            log_string_to_wait_for = "SERVER STARTED"
+            wait_for_logs(
+                container=self.container, predicate=log_string_to_wait_for, timeout=30
+            )
+        else:
+            self.provided_container = True
+            self.container = kwargs["offline_container"]
+
+        self.exposed_port = self.container.get_exposed_port("8080")
+        self.client = Trino(
+            user="user", catalog="memory", host="localhost", port=self.exposed_port,
+        )
+
+    def teardown(self):
+        if not self.provided_container:
+            self.container.stop()
+
+    def create_data_source(
+        self,
+        df: pd.DataFrame,
+        destination_name: str,
+        suffix: Optional[str] = None,
+        timestamp_field="ts",
+        created_timestamp_column="created_ts",
+        field_mapping: Optional[Dict[str, str]] = None,
+    ) -> DataSource:
+        destination_name = self.get_prefixed_table_name(destination_name)
+        self.client.execute_query(
+            f"CREATE SCHEMA IF NOT EXISTS memory.{self.project_name}"
+        )
+        self.client.execute_query(f"DROP TABLE IF EXISTS {destination_name}")
+
+        self.tables.append(destination_name)
+
+        upload_pandas_dataframe_to_trino(
+            client=self.client,
+            df=df,
+            table=destination_name,
+            connector_args={"type": "memory"},
+        )
+
+        return TrinoSource(
+            name="ci_trino_offline_store",
+            table=destination_name,
+            timestamp_field=timestamp_field,
+            created_timestamp_column=created_timestamp_column,
+            query=f"SELECT * FROM {destination_name}",
+            field_mapping=field_mapping or {"ts_1": "ts"},
+        )
+
+    def create_saved_dataset_destination(self) -> SavedDatasetTrinoStorage:
+        table = self.get_prefixed_table_name(
+            f"persisted_ds_{str(uuid.uuid4()).replace('-', '_')}"
+        )
+        self.tables.append(table)
+
+        return SavedDatasetTrinoStorage(table=table)
+
+    def get_prefixed_table_name(self, suffix: str) -> str:
+        return f"memory.{self.project_name}.{suffix}"
+
+    def create_offline_store_config(self) -> FeastConfigBaseModel:
+        return TrinoOfflineStoreConfig(
+            host="localhost",
+            port=self.exposed_port,
+            catalog="memory",
+            dataset=self.project_name,
+            connector={"type": "memory"},
+        )
diff --git a/sdk/python/tests/integration/feature_repos/universal/entities.py b/sdk/python/tests/integration/feature_repos/universal/entities.py
new file mode 100644
index 0000000000..b7a7583f1b
--- /dev/null
+++ b/sdk/python/tests/integration/feature_repos/universal/entities.py
@@ -0,0 +1,22 @@
+from feast import Entity, ValueType
+
+
+def driver(value_type: ValueType = ValueType.INT64):
+    return Entity(
+        name="driver",  # The name is derived from this argument, not the object name.
+ value_type=value_type, + description="driver id", + join_keys=["driver_id"], + ) + + +def customer(): + return Entity(name="customer_id", value_type=ValueType.INT64) + + +def location(): + return Entity(name="location_id", value_type=ValueType.INT64) + + +def item(): + return Entity(name="item_id", value_type=ValueType.INT64) diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py new file mode 100644 index 0000000000..a6786528e1 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -0,0 +1,249 @@ +from datetime import timedelta +from typing import Dict, List, Optional, Union + +import numpy as np +import pandas as pd + +from feast import ( + Feature, + FeatureView, + Field, + OnDemandFeatureView, + PushSource, + ValueType, +) +from feast.data_source import DataSource, RequestSource +from feast.types import Array, FeastType, Float32, Float64, Int32 +from tests.integration.feature_repos.universal.entities import location + + +def driver_feature_view( + data_source: DataSource, + name="test_correctness", + infer_features: bool = False, + dtype: FeastType = Float32, +) -> FeatureView: + return FeatureView( + name=name, + entities=["driver"], + schema=None if infer_features else [Field(name="value", dtype=dtype)], + ttl=timedelta(days=5), + source=data_source, + ) + + +def global_feature_view( + data_source: DataSource, + name="test_entityless", + infer_features: bool = False, + value_type: ValueType = ValueType.INT32, +) -> FeatureView: + return FeatureView( + name=name, + entities=[], + # Test that Features still work for FeatureViews. + features=None + if infer_features + else [Feature(name="entityless_value", dtype=value_type)], + ttl=timedelta(days=5), + source=data_source, + ) + + +def conv_rate_plus_100(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_100"] = features_df["conv_rate"] + 100 + df["conv_rate_plus_val_to_add"] = ( + features_df["conv_rate"] + features_df["val_to_add"] + ) + df["conv_rate_plus_100_rounded"] = ( + df["conv_rate_plus_100"].astype("float").round().astype(pd.Int32Dtype()) + ) + return df + + +def conv_rate_plus_100_feature_view( + sources: Dict[str, Union[RequestSource, FeatureView]], + infer_features: bool = False, + features: Optional[List[Field]] = None, +) -> OnDemandFeatureView: + # Test that positional arguments and Features still work for ODFVs. 
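+    # As a worked example of the udf above: a row with conv_rate=0.2 and
+    # val_to_add=1 yields conv_rate_plus_100=100.2,
+    # conv_rate_plus_val_to_add=1.2, and conv_rate_plus_100_rounded=100.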
+ _features = features or [ + Field(name="conv_rate_plus_100", dtype=Float64), + Field(name="conv_rate_plus_val_to_add", dtype=Float64), + Field(name="conv_rate_plus_100_rounded", dtype=Int32), + ] + return OnDemandFeatureView( + name=conv_rate_plus_100.__name__, + schema=[] if infer_features else _features, + sources=sources, + udf=conv_rate_plus_100, + ) + + +def similarity(features_df: pd.DataFrame) -> pd.DataFrame: + if features_df.size == 0: + # give hint to Feast about return type + df = pd.DataFrame({"cos_double": [0.0]}) + df["cos_float"] = df["cos_double"].astype(np.float32) + return df + vectors_a = features_df["embedding_double"].apply(np.array) + vectors_b = features_df["vector_double"].apply(np.array) + dot_products = vectors_a.mul(vectors_b).apply(sum) + norms_q = vectors_a.apply(np.linalg.norm) + norms_doc = vectors_b.apply(np.linalg.norm) + df = pd.DataFrame() + df["cos_double"] = dot_products / (norms_q * norms_doc) + df["cos_float"] = df["cos_double"].astype(np.float32) + return df + + +def similarity_feature_view( + sources: Dict[str, Union[RequestSource, FeatureView]], + infer_features: bool = False, + features: Optional[List[Feature]] = None, +) -> OnDemandFeatureView: + _fields = [ + Field(name="cos_double", dtype=Float64), + Field(name="cos_float", dtype=Float32), + ] + if features is not None: + _fields = [Field.from_feature(feature) for feature in features] + + return OnDemandFeatureView( + name=similarity.__name__, + sources=sources, + schema=[] if infer_features else _fields, + udf=similarity, + ) + + +def create_conv_rate_request_source(): + return RequestSource( + name="conv_rate_input", schema=[Field(name="val_to_add", dtype=Int32)], + ) + + +def create_similarity_request_source(): + return RequestSource( + name="similarity_input", + schema={ + "vector_double": ValueType.DOUBLE_LIST, + "vector_float": ValueType.FLOAT_LIST, + }, + ) + + +def create_item_embeddings_feature_view(source, infer_features: bool = False): + item_embeddings_feature_view = FeatureView( + name="item_embeddings", + entities=["item"], + schema=None + if infer_features + else [ + Field(name="embedding_double", dtype=Array(Float64)), + Field(name="embedding_float", dtype=Array(Float32)), + ], + batch_source=source, + ttl=timedelta(hours=2), + ) + return item_embeddings_feature_view + + +def create_driver_hourly_stats_feature_view(source, infer_features: bool = False): + driver_stats_feature_view = FeatureView( + name="driver_stats", + entities=["driver"], + schema=None + if infer_features + else [ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int32), + ], + source=source, + ttl=timedelta(hours=2), + ) + return driver_stats_feature_view + + +def create_customer_daily_profile_feature_view(source, infer_features: bool = False): + customer_profile_feature_view = FeatureView( + name="customer_profile", + entities=["customer_id"], + schema=None + if infer_features + else [ + Field(name="current_balance", dtype=Float32), + Field(name="avg_passenger_count", dtype=Float32), + Field(name="lifetime_trip_count", dtype=Int32), + ], + source=source, + ttl=timedelta(days=2), + ) + return customer_profile_feature_view + + +def create_global_stats_feature_view(source, infer_features: bool = False): + global_stats_feature_view = FeatureView( + name="global_stats", + entities=[], + features=None + if infer_features + else [ + # Test that Features still work for FeatureViews. 
+ Feature(name="num_rides", dtype=ValueType.INT32), + Feature(name="avg_ride_length", dtype=ValueType.FLOAT), + ], + source=source, + ttl=timedelta(days=2), + ) + return global_stats_feature_view + + +def create_order_feature_view(source, infer_features: bool = False): + return FeatureView( + name="order", + entities=["driver", "customer_id"], + schema=None + if infer_features + else [Field(name="order_is_success", dtype=Int32)], + source=source, + ttl=timedelta(days=2), + ) + + +def create_location_stats_feature_view(source, infer_features: bool = False): + location_stats_feature_view = FeatureView( + name="location_stats", + entities=[location()], + schema=None if infer_features else [Field(name="temperature", dtype=Int32)], + source=source, + ttl=timedelta(days=2), + ) + return location_stats_feature_view + + +def create_field_mapping_feature_view(source): + return FeatureView( + name="field_mapping", + entities=[], + # Test that Features still work for FeatureViews. + features=[Feature(name="feature_name", dtype=ValueType.INT32)], + source=source, + ttl=timedelta(days=2), + ) + + +def create_pushable_feature_view(batch_source: DataSource): + push_source = PushSource( + name="location_stats_push_source", batch_source=batch_source, + ) + return FeatureView( + name="pushable_location_stats", + entities=["location_id"], + # Test that Features still work for FeatureViews. + features=[Feature(name="temperature", dtype=ValueType.INT32)], + ttl=timedelta(days=2), + source=push_source, + ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/__init__.py b/sdk/python/tests/integration/feature_repos/universal/online_store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py new file mode 100644 index 0000000000..52851e80d8 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py @@ -0,0 +1,38 @@ +import os +from typing import Dict + +from google.cloud import datastore +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class DatastoreOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str): + super().__init__(project_name) + self.container = ( + DockerContainer( + "gcr.io/google.com/cloudsdktool/cloud-sdk:380.0.0-emulators" + ) + .with_command( + "gcloud beta emulators datastore start --project test-project --host-port 0.0.0.0:8081" + ) + .with_exposed_ports("8081") + ) + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = r"\[datastore\] Dev App Server is now running" + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=5 + ) + exposed_port = self.container.get_exposed_port("8081") + os.environ[datastore.client.DATASTORE_EMULATOR_HOST] = f"0.0.0.0:{exposed_port}" + return {"type": "datastore", "project_id": "test-project"} + + def teardown(self): + del os.environ[datastore.client.DATASTORE_EMULATOR_HOST] + self.container.stop() diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py new file mode 100644 index 0000000000..e4d8e0c3d0 --- /dev/null +++ 
b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py @@ -0,0 +1,34 @@ +from typing import Dict + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class DynamoDBOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str): + super().__init__(project_name) + self.container = DockerContainer( + "amazon/dynamodb-local:latest" + ).with_exposed_ports("8000") + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = ( + "Initializing DynamoDB Local with the following configuration:" + ) + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=5 + ) + exposed_port = self.container.get_exposed_port("8000") + return { + "type": "dynamodb", + "endpoint_url": f"http://localhost:{exposed_port}", + "region": "us-west-2", + } + + def teardown(self): + self.container.stop() diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py new file mode 100644 index 0000000000..073760f514 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py @@ -0,0 +1,26 @@ +from typing import Dict + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class RedisOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str): + super().__init__(project_name) + self.container = DockerContainer("redis").with_exposed_ports("6379") + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = "Ready to accept connections" + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=5 + ) + exposed_port = self.container.get_exposed_port("6379") + return {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"} + + def teardown(self): + self.container.stop() diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py new file mode 100644 index 0000000000..0fa0dbed3e --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py @@ -0,0 +1,14 @@ +from abc import ABC + +from feast.repo_config import FeastConfigBaseModel + + +class OnlineStoreCreator(ABC): + def __init__(self, project_name: str): + self.project_name = project_name + + def create_online_store(self) -> FeastConfigBaseModel: + ... + + def teardown(self): + ... 
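Each of the three creators above follows the same lifecycle: the constructor wires up a DockerContainer, create_online_store() starts it and blocks on a readiness log line via wait_for_logs before returning a config dict that points Feast at the container's mapped port, and teardown() stops the container. A minimal sketch of how a harness might drive one of them; the run_tests entry point and the try/finally wiring are illustrative assumptions, not part of this patch:

    # Hypothetical harness around the creators defined above.
    creator = RedisOnlineStoreCreator(project_name="test_project")
    config = creator.create_online_store()
    # config now points at the running container, e.g.
    # {"type": "redis", "connection_string": "localhost:49153,db=0"}
    try:
        run_tests(online_store_config=config)  # assumed test entry point
    finally:
        creator.teardown()  # stop the container even if the tests fail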
diff --git a/sdk/python/tests/integration/materialization/test_offline_online_store_consistency.py b/sdk/python/tests/integration/materialization/test_offline_online_store_consistency.py deleted file mode 100644 index 41741e0f96..0000000000 --- a/sdk/python/tests/integration/materialization/test_offline_online_store_consistency.py +++ /dev/null @@ -1,468 +0,0 @@ -import contextlib -import math -import random -import tempfile -import time -import uuid -from datetime import datetime, timedelta -from pathlib import Path -from typing import Iterator, Optional, Tuple - -import pandas as pd -import pytest -from google.cloud import bigquery -from pytz import timezone, utc - -from feast import BigQuerySource, FileSource, RedshiftSource -from feast.data_format import ParquetFormat -from feast.data_source import DataSource -from feast.entity import Entity -from feast.feature import Feature -from feast.feature_store import FeatureStore -from feast.feature_view import FeatureView -from feast.infra.offline_stores.file import FileOfflineStoreConfig -from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig -from feast.infra.online_stores.datastore import DatastoreOnlineStoreConfig -from feast.infra.online_stores.dynamodb import DynamoDBOnlineStoreConfig -from feast.infra.online_stores.redis import RedisOnlineStoreConfig, RedisType -from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig -from feast.infra.utils import aws_utils -from feast.repo_config import RepoConfig -from feast.value_type import ValueType - - -def create_dataset() -> pd.DataFrame: - now = datetime.utcnow() - ts = pd.Timestamp(now).round("ms") - data = { - "id": [1, 2, 1, 3, 3], - "value": [0.1, None, 0.3, 4, 5], - "ts_1": [ - ts - timedelta(hours=4), - ts, - ts - timedelta(hours=3), - # Use different time zones to test tz-naive -> tz-aware conversion - (ts - timedelta(hours=4)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("Europe/Berlin")), - (ts - timedelta(hours=1)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("US/Pacific")), - ], - "created_ts": [ts, ts, ts, ts, ts], - } - return pd.DataFrame.from_dict(data) - - -def get_feature_view(data_source: DataSource) -> FeatureView: - return FeatureView( - name="test_bq_correctness", - entities=["driver"], - features=[Feature("value", ValueType.FLOAT)], - ttl=timedelta(days=5), - input=data_source, - ) - - -# bq_source_type must be one of "query" and "table" -@contextlib.contextmanager -def prep_bq_fs_and_fv( - bq_source_type: str, -) -> Iterator[Tuple[FeatureStore, FeatureView]]: - client = bigquery.Client() - gcp_project = client.project - bigquery_dataset = "test_ingestion" - dataset = bigquery.Dataset(f"{gcp_project}.{bigquery_dataset}") - client.create_dataset(dataset, exists_ok=True) - dataset.default_table_expiration_ms = ( - 1000 * 60 * 60 * 24 * 14 - ) # 2 weeks in milliseconds - client.update_dataset(dataset, ["default_table_expiration_ms"]) - - df = create_dataset() - - job_config = bigquery.LoadJobConfig() - table_ref = f"{gcp_project}.{bigquery_dataset}.{bq_source_type}_correctness_{int(time.time_ns())}" - query = f"SELECT * FROM `{table_ref}`" - job = client.load_table_from_dataframe(df, table_ref, job_config=job_config) - job.result() - - bigquery_source = BigQuerySource( - table_ref=table_ref if bq_source_type == "table" else None, - query=query if bq_source_type == "query" else None, - event_timestamp_column="ts", - created_timestamp_column="created_ts", - date_partition_column="", - field_mapping={"ts_1": "ts", "id": "driver_id"}, - ) 
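Every source in this (now deleted) consistency test uses the same rename convention: field_mapping={"ts_1": "ts", "id": "driver_id"} renames the raw columns before Feast sees them, so event_timestamp_column refers to the post-mapping name "ts". A minimal illustration of the same convention on a file source, with a hypothetical parquet path:

    source = FileSource(
        path="/tmp/driver_stats.parquet",  # hypothetical path
        event_timestamp_column="ts",       # the name *after* field_mapping is applied
        field_mapping={"ts_1": "ts", "id": "driver_id"},
    )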
- - fv = get_feature_view(bigquery_source) - e = Entity( - name="driver", - description="id for driver", - join_key="driver_id", - value_type=ValueType.INT32, - ) - with tempfile.TemporaryDirectory() as repo_dir_name: - config = RepoConfig( - registry=str(Path(repo_dir_name) / "registry.db"), - project=f"test_bq_correctness_{str(uuid.uuid4()).replace('-', '')}", - provider="gcp", - online_store=DatastoreOnlineStoreConfig(namespace="integration_test"), - ) - fs = FeatureStore(config=config) - fs.apply([fv, e]) - - yield fs, fv - - fs.teardown() - - -@contextlib.contextmanager -def prep_redshift_fs_and_fv( - source_type: str, -) -> Iterator[Tuple[FeatureStore, FeatureView]]: - client = aws_utils.get_redshift_data_client("us-west-2") - s3 = aws_utils.get_s3_resource("us-west-2") - - df = create_dataset() - - table_name = f"test_ingestion_{source_type}_correctness_{int(time.time_ns())}_{random.randint(1000, 9999)}" - - offline_store = RedshiftOfflineStoreConfig( - cluster_id="feast-integration-tests", - region="us-west-2", - user="admin", - database="feast", - s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion", - iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role", - ) - - aws_utils.upload_df_to_redshift( - client, - offline_store.cluster_id, - offline_store.database, - offline_store.user, - s3, - f"{offline_store.s3_staging_location}/copy/{table_name}.parquet", - offline_store.iam_role, - table_name, - df, - ) - - redshift_source = RedshiftSource( - table=table_name if source_type == "table" else None, - query=f"SELECT * FROM {table_name}" if source_type == "query" else None, - event_timestamp_column="ts", - created_timestamp_column="created_ts", - date_partition_column="", - field_mapping={"ts_1": "ts", "id": "driver_id"}, - ) - - fv = get_feature_view(redshift_source) - e = Entity( - name="driver", - description="id for driver", - join_key="driver_id", - value_type=ValueType.INT32, - ) - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - config = RepoConfig( - registry=str(Path(repo_dir_name) / "registry.db"), - project=f"test_bq_correctness_{str(uuid.uuid4()).replace('-', '')}", - provider="local", - online_store=SqliteOnlineStoreConfig( - path=str(Path(data_dir_name) / "online_store.db") - ), - offline_store=offline_store, - ) - fs = FeatureStore(config=config) - fs.apply([fv, e]) - - yield fs, fv - - # Clean up the uploaded Redshift table - aws_utils.execute_redshift_statement( - client, - offline_store.cluster_id, - offline_store.database, - offline_store.user, - f"DROP TABLE {table_name}", - ) - - -@contextlib.contextmanager -def prep_local_fs_and_fv() -> Iterator[Tuple[FeatureStore, FeatureView]]: - with tempfile.NamedTemporaryFile(suffix=".parquet") as f: - df = create_dataset() - f.close() - df.to_parquet(f.name) - file_source = FileSource( - file_format=ParquetFormat(), - file_url=f"file://{f.name}", - event_timestamp_column="ts", - created_timestamp_column="created_ts", - date_partition_column="", - field_mapping={"ts_1": "ts", "id": "driver_id"}, - ) - fv = get_feature_view(file_source) - e = Entity( - name="driver", - description="id for driver", - join_key="driver_id", - value_type=ValueType.INT32, - ) - project = f"test_local_correctness_{str(uuid.uuid4()).replace('-', '')}" - print(f"Using project: {project}") - - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - config = RepoConfig( - registry=str(Path(repo_dir_name) / 
"registry.db"), - project=project, - provider="local", - online_store=SqliteOnlineStoreConfig( - path=str(Path(data_dir_name) / "online_store.db") - ), - ) - fs = FeatureStore(config=config) - fs.apply([fv, e]) - - yield fs, fv - - fs.teardown() - - -@contextlib.contextmanager -def prep_redis_fs_and_fv() -> Iterator[Tuple[FeatureStore, FeatureView]]: - with tempfile.NamedTemporaryFile(suffix=".parquet") as f: - df = create_dataset() - f.close() - df.to_parquet(f.name) - file_source = FileSource( - file_format=ParquetFormat(), - file_url=f"file://{f.name}", - event_timestamp_column="ts", - created_timestamp_column="created_ts", - date_partition_column="", - field_mapping={"ts_1": "ts", "id": "driver_id"}, - ) - fv = get_feature_view(file_source) - e = Entity( - name="driver", - description="id for driver", - join_key="driver_id", - value_type=ValueType.INT32, - ) - project = f"test_redis_correctness_{str(uuid.uuid4()).replace('-', '')}" - print(f"Using project: {project}") - with tempfile.TemporaryDirectory() as repo_dir_name: - config = RepoConfig( - registry=str(Path(repo_dir_name) / "registry.db"), - project=project, - provider="local", - online_store=RedisOnlineStoreConfig( - type="redis", - redis_type=RedisType.redis, - connection_string="localhost:6379,db=0", - ), - ) - fs = FeatureStore(config=config) - fs.apply([fv, e]) - - yield fs, fv - - fs.teardown() - - -@contextlib.contextmanager -def prep_dynamodb_fs_and_fv() -> Iterator[Tuple[FeatureStore, FeatureView]]: - with tempfile.NamedTemporaryFile(suffix=".parquet") as f: - df = create_dataset() - f.close() - df.to_parquet(f.name) - file_source = FileSource( - file_format=ParquetFormat(), - file_url=f"file://{f.name}", - event_timestamp_column="ts", - created_timestamp_column="created_ts", - date_partition_column="", - field_mapping={"ts_1": "ts", "id": "driver_id"}, - ) - fv = get_feature_view(file_source) - e = Entity( - name="driver", - description="id for driver", - join_key="driver_id", - value_type=ValueType.INT32, - ) - project = f"test_dynamo_correctness_{str(uuid.uuid4()).replace('-', '')}" - print(f"Using project {project}") - with tempfile.TemporaryDirectory() as repo_dir_name: - config = RepoConfig( - registry=str(Path(repo_dir_name) / "registry.db"), - project=project, - provider="aws", - online_store=DynamoDBOnlineStoreConfig(region="us-west-2"), - offline_store=FileOfflineStoreConfig(), - ) - fs = FeatureStore(config=config) - fs.apply([fv, e]) - - yield fs, fv - - fs.teardown() - - -# Checks that both offline & online store values are as expected -def check_offline_and_online_features( - fs: FeatureStore, - fv: FeatureView, - driver_id: int, - event_timestamp: datetime, - expected_value: Optional[float], - full_feature_names: bool, - check_offline_store: bool = True, -) -> None: - # Check online store - response_dict = fs.get_online_features( - [f"{fv.name}:value"], - [{"driver": driver_id}], - full_feature_names=full_feature_names, - ).to_dict() - - if full_feature_names: - if expected_value: - assert abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 - else: - assert response_dict[f"{fv.name}__value"][0] is None - else: - if expected_value: - assert abs(response_dict["value"][0] - expected_value) < 1e-6 - else: - assert response_dict["value"][0] is None - - # Check offline store - if check_offline_store: - df = fs.get_historical_features( - entity_df=pd.DataFrame.from_dict( - {"driver_id": [driver_id], "event_timestamp": [event_timestamp]} - ), - feature_refs=[f"{fv.name}:value"], - 
full_feature_names=full_feature_names, - ).to_df() - - if full_feature_names: - if expected_value: - assert abs(df.to_dict()[f"{fv.name}__value"][0] - expected_value) < 1e-6 - else: - assert math.isnan(df.to_dict()[f"{fv.name}__value"][0]) - else: - if expected_value: - assert abs(df.to_dict()["value"][0] - expected_value) < 1e-6 - else: - assert math.isnan(df.to_dict()["value"][0]) - - -def run_offline_online_store_consistency_test( - fs: FeatureStore, - fv: FeatureView, - full_feature_names: bool, - check_offline_store: bool = True, -) -> None: - now = datetime.utcnow() - # Run materialize() - # use both tz-naive & tz-aware timestamps to test that they're both correctly handled - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) - end_date = now - timedelta(hours=2) - fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) - - # check result of materialize() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=1, - event_timestamp=end_date, - expected_value=0.3, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=2, - event_timestamp=end_date, - expected_value=None, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - # check prior value for materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=end_date, - expected_value=4, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - # run materialize_incremental() - fs.materialize_incremental(feature_views=[fv.name], end_date=now) - - # check result of materialize_incremental() - check_offline_and_online_features( - fs=fs, - fv=fv, - driver_id=3, - event_timestamp=now, - expected_value=5, - full_feature_names=full_feature_names, - check_offline_store=check_offline_store, - ) - - -@pytest.mark.integration -@pytest.mark.parametrize( - "bq_source_type", ["query", "table"], -) -@pytest.mark.parametrize("full_feature_names", [True, False]) -def test_bq_offline_online_store_consistency( - bq_source_type: str, full_feature_names: bool -): - with prep_bq_fs_and_fv(bq_source_type) as (fs, fv): - run_offline_online_store_consistency_test(fs, fv, full_feature_names) - - -@pytest.mark.parametrize("full_feature_names", [True, False]) -@pytest.mark.integration -def test_redis_offline_online_store_consistency(full_feature_names: bool): - with prep_redis_fs_and_fv() as (fs, fv): - run_offline_online_store_consistency_test(fs, fv, full_feature_names) - - -@pytest.mark.parametrize("full_feature_names", [True, False]) -@pytest.mark.integration -def test_dynamodb_offline_online_store_consistency(full_feature_names: bool): - with prep_dynamodb_fs_and_fv() as (fs, fv): - run_offline_online_store_consistency_test(fs, fv, full_feature_names) - - -@pytest.mark.integration -@pytest.mark.parametrize( - "source_type", ["query", "table"], -) -@pytest.mark.parametrize("full_feature_names", [True, False]) -def test_redshift_offline_online_store_consistency( - source_type: str, full_feature_names: bool -): - with prep_redshift_fs_and_fv(source_type) as (fs, fv): - # TODO: remove check_offline_store parameter once Redshift's get_historical_features is implemented - run_offline_online_store_consistency_test(fs, fv, full_feature_names, False) - - -@pytest.mark.parametrize("full_feature_names", [True, False]) -def test_local_offline_online_store_consistency(full_feature_names: bool): - with 
prep_local_fs_and_fv() as (fs, fv): - run_offline_online_store_consistency_test(fs, fv, full_feature_names) diff --git a/sdk/python/tests/integration/offline_store/test_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_historical_retrieval.py deleted file mode 100644 index 087b578365..0000000000 --- a/sdk/python/tests/integration/offline_store/test_historical_retrieval.py +++ /dev/null @@ -1,670 +0,0 @@ -import os -import random -import string -import time -from datetime import datetime, timedelta -from tempfile import TemporaryDirectory - -import assertpy -import numpy as np -import pandas as pd -import pytest -from google.cloud import bigquery -from pandas.testing import assert_frame_equal -from pytz import utc - -import feast.driver_test_data as driver_data -from feast import BigQuerySource, FileSource, RepoConfig, errors, utils -from feast.entity import Entity -from feast.errors import FeatureNameCollisionError -from feast.feature import Feature -from feast.feature_store import FeatureStore, _validate_feature_refs -from feast.feature_view import FeatureView -from feast.infra.offline_stores.bigquery import ( - BigQueryOfflineStoreConfig, - _get_entity_df_timestamp_bounds, -) -from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig -from feast.infra.provider import DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL -from feast.value_type import ValueType - -np.random.seed(0) - -PROJECT_NAME = "default" - - -def generate_entities(date, infer_event_timestamp_col, order_count: int = 1000): - end_date = date - before_start_date = end_date - timedelta(days=365) - start_date = end_date - timedelta(days=7) - after_end_date = end_date + timedelta(days=365) - customer_entities = list(range(1001, 1110)) - driver_entities = list(range(5001, 5110)) - orders_df = driver_data.create_orders_df( - customers=customer_entities, - drivers=driver_entities, - start_date=before_start_date, - end_date=after_end_date, - order_count=order_count, - infer_event_timestamp_col=infer_event_timestamp_col, - ) - return customer_entities, driver_entities, end_date, orders_df, start_date - - -def stage_driver_hourly_stats_parquet_source(directory, df): - # Write to disk - driver_stats_path = os.path.join(directory, "driver_stats.parquet") - df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) - return FileSource( - path=driver_stats_path, - event_timestamp_column="datetime", - created_timestamp_column="", - ) - - -def stage_driver_hourly_stats_bigquery_source(df, table_id): - client = bigquery.Client() - job_config = bigquery.LoadJobConfig() - df.reset_index(drop=True, inplace=True) - job = client.load_table_from_dataframe(df, table_id, job_config=job_config) - job.result() - - -def create_driver_hourly_stats_feature_view(source): - driver_stats_feature_view = FeatureView( - name="driver_stats", - entities=["driver"], - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT32), - ], - input=source, - ttl=timedelta(hours=2), - ) - return driver_stats_feature_view - - -def stage_customer_daily_profile_parquet_source(directory, df): - customer_profile_path = os.path.join(directory, "customer_profile.parquet") - df.to_parquet(path=customer_profile_path, allow_truncated_timestamps=True) - return FileSource( - path=customer_profile_path, - event_timestamp_column="datetime", - created_timestamp_column="created", - ) - - -def stage_customer_daily_profile_bigquery_source(df, 
table_id): - client = bigquery.Client() - job_config = bigquery.LoadJobConfig() - df.reset_index(drop=True, inplace=True) - job = client.load_table_from_dataframe(df, table_id, job_config=job_config) - job.result() - - -def create_customer_daily_profile_feature_view(source): - customer_profile_feature_view = FeatureView( - name="customer_profile", - entities=["customer_id"], - features=[ - Feature(name="current_balance", dtype=ValueType.FLOAT), - Feature(name="avg_passenger_count", dtype=ValueType.FLOAT), - Feature(name="lifetime_trip_count", dtype=ValueType.INT32), - Feature(name="avg_daily_trips", dtype=ValueType.INT32), - ], - input=source, - ttl=timedelta(days=2), - ) - return customer_profile_feature_view - - -# Converts the given column of the pandas records to UTC timestamps -def convert_timestamp_records_to_utc(records, column): - for record in records: - record[column] = utils.make_tzaware(record[column]).astimezone(utc) - return records - - -# Find the latest record in the given time range and filter -def find_asof_record(records, ts_key, ts_start, ts_end, filter_key, filter_value): - found_record = {} - for record in records: - if record[filter_key] == filter_value and ts_start <= record[ts_key] <= ts_end: - if not found_record or found_record[ts_key] < record[ts_key]: - found_record = record - return found_record - - -def get_expected_training_df( - customer_df: pd.DataFrame, - customer_fv: FeatureView, - driver_df: pd.DataFrame, - driver_fv: FeatureView, - orders_df: pd.DataFrame, - event_timestamp: str, - full_feature_names: bool = False, -): - # Convert all pandas dataframes into records with UTC timestamps - order_records = convert_timestamp_records_to_utc( - orders_df.to_dict("records"), event_timestamp - ) - driver_records = convert_timestamp_records_to_utc( - driver_df.to_dict("records"), driver_fv.input.event_timestamp_column - ) - customer_records = convert_timestamp_records_to_utc( - customer_df.to_dict("records"), customer_fv.input.event_timestamp_column - ) - - # Manually do point-in-time join of orders to drivers and customers records - for order_record in order_records: - driver_record = find_asof_record( - driver_records, - ts_key=driver_fv.input.event_timestamp_column, - ts_start=order_record[event_timestamp] - driver_fv.ttl, - ts_end=order_record[event_timestamp], - filter_key="driver_id", - filter_value=order_record["driver_id"], - ) - customer_record = find_asof_record( - customer_records, - ts_key=customer_fv.input.event_timestamp_column, - ts_start=order_record[event_timestamp] - customer_fv.ttl, - ts_end=order_record[event_timestamp], - filter_key="customer_id", - filter_value=order_record["customer_id"], - ) - - order_record.update( - { - (f"driver_stats__{k}" if full_feature_names else k): driver_record.get( - k, None - ) - for k in ("conv_rate", "avg_daily_trips") - } - ) - - order_record.update( - { - ( - f"customer_profile__{k}" if full_feature_names else k - ): customer_record.get(k, None) - for k in ( - "current_balance", - "avg_passenger_count", - "lifetime_trip_count", - ) - } - ) - - # Convert records back to pandas dataframe - expected_df = pd.DataFrame(order_records) - - # Move "datetime" column to front - current_cols = expected_df.columns.tolist() - current_cols.remove(event_timestamp) - expected_df = expected_df[[event_timestamp] + current_cols] - - # Cast some columns to expected types, since we lose information when converting pandas DFs into Python objects. 
- if full_feature_names: - expected_column_types = { - "order_is_success": "int32", - "driver_stats__conv_rate": "float32", - "customer_profile__current_balance": "float32", - "customer_profile__avg_passenger_count": "float32", - } - else: - expected_column_types = { - "order_is_success": "int32", - "conv_rate": "float32", - "current_balance": "float32", - "avg_passenger_count": "float32", - } - - for col, typ in expected_column_types.items(): - expected_df[col] = expected_df[col].astype(typ) - - return expected_df - - -def stage_orders_bigquery(df, table_id): - client = bigquery.Client() - job_config = bigquery.LoadJobConfig() - df.reset_index(drop=True, inplace=True) - job = client.load_table_from_dataframe(df, table_id, job_config=job_config) - job.result() - - -class BigQueryDataSet: - def __init__(self, dataset_name): - self.name = dataset_name - - def __enter__(self): - client = bigquery.Client() - dataset = bigquery.Dataset(f"{client.project}.{self.name}") - dataset.location = "US" - dataset = client.create_dataset(dataset, exists_ok=True) - return dataset - - def __exit__(self, exc_type, exc_value, exc_traceback): - print("Tearing down BigQuery dataset") - client = bigquery.Client() - dataset_id = f"{client.project}.{self.name}" - - client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True) - print(f"Deleted dataset '{dataset_id}'") - if exc_type: - print( - "***Logging exception {}***".format( - (exc_type, exc_value, exc_traceback) - ) - ) - - -@pytest.mark.parametrize( - "infer_event_timestamp_col", [False, True], -) -@pytest.mark.parametrize( - "full_feature_names", [False, True], -) -def test_historical_features_from_parquet_sources( - infer_event_timestamp_col, full_feature_names -): - start_date = datetime.now().replace(microsecond=0, second=0, minute=0) - ( - customer_entities, - driver_entities, - end_date, - orders_df, - start_date, - ) = generate_entities(start_date, infer_event_timestamp_col) - - with TemporaryDirectory() as temp_dir: - driver_df = driver_data.create_driver_hourly_stats_df( - driver_entities, start_date, end_date - ) - driver_source = stage_driver_hourly_stats_parquet_source(temp_dir, driver_df) - driver_fv = create_driver_hourly_stats_feature_view(driver_source) - customer_df = driver_data.create_customer_daily_profile_df( - customer_entities, start_date, end_date - ) - customer_source = stage_customer_daily_profile_parquet_source( - temp_dir, customer_df - ) - customer_fv = create_customer_daily_profile_feature_view(customer_source) - driver = Entity(name="driver", join_key="driver_id", value_type=ValueType.INT64) - customer = Entity(name="customer_id", value_type=ValueType.INT64) - - store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="default", - provider="local", - online_store=SqliteOnlineStoreConfig( - path=os.path.join(temp_dir, "online_store.db") - ), - ) - ) - - store.apply([driver, customer, driver_fv, customer_fv]) - - job = store.get_historical_features( - entity_df=orders_df, - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - ], - full_feature_names=full_feature_names, - ) - - actual_df = job.to_df() - event_timestamp = ( - DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in orders_df.columns - else "e_ts" - ) - expected_df = get_expected_training_df( - customer_df, - customer_fv, - driver_df, 
- driver_fv, - orders_df, - event_timestamp, - full_feature_names=full_feature_names, - ) - assert_frame_equal( - expected_df.sort_values( - by=[event_timestamp, "order_id", "driver_id", "customer_id"] - ).reset_index(drop=True), - actual_df.sort_values( - by=[event_timestamp, "order_id", "driver_id", "customer_id"] - ).reset_index(drop=True), - ) - - -@pytest.mark.integration -@pytest.mark.parametrize( - "provider_type", ["local", "gcp", "gcp_custom_offline_config"], -) -@pytest.mark.parametrize( - "infer_event_timestamp_col", [False, True], -) -@pytest.mark.parametrize( - "full_feature_names", [False, True], -) -def test_historical_features_from_bigquery_sources( - provider_type, infer_event_timestamp_col, capsys, full_feature_names -): - start_date = datetime.now().replace(microsecond=0, second=0, minute=0) - ( - customer_entities, - driver_entities, - end_date, - orders_df, - start_date, - ) = generate_entities(start_date, infer_event_timestamp_col) - - bigquery_dataset = ( - f"test_hist_retrieval_{int(time.time_ns())}_{random.randint(1000, 9999)}" - ) - - with BigQueryDataSet(bigquery_dataset), TemporaryDirectory() as temp_dir: - gcp_project = bigquery.Client().project - - # Orders Query - table_id = f"{bigquery_dataset}.orders" - stage_orders_bigquery(orders_df, table_id) - entity_df_query = f"SELECT * FROM {gcp_project}.{table_id}" - - # Driver Feature View - driver_df = driver_data.create_driver_hourly_stats_df( - driver_entities, start_date, end_date - ) - driver_table_id = f"{gcp_project}.{bigquery_dataset}.driver_hourly" - stage_driver_hourly_stats_bigquery_source(driver_df, driver_table_id) - driver_source = BigQuerySource( - table_ref=driver_table_id, - event_timestamp_column="datetime", - created_timestamp_column="created", - ) - driver_fv = create_driver_hourly_stats_feature_view(driver_source) - - # Customer Feature View - customer_df = driver_data.create_customer_daily_profile_df( - customer_entities, start_date, end_date - ) - customer_table_id = f"{gcp_project}.{bigquery_dataset}.customer_profile" - - stage_customer_daily_profile_bigquery_source(customer_df, customer_table_id) - customer_source = BigQuerySource( - table_ref=customer_table_id, - event_timestamp_column="datetime", - created_timestamp_column="", - ) - customer_fv = create_customer_daily_profile_feature_view(customer_source) - - driver = Entity(name="driver", join_key="driver_id", value_type=ValueType.INT64) - customer = Entity(name="customer_id", value_type=ValueType.INT64) - - if provider_type == "local": - store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="default", - provider="local", - online_store=SqliteOnlineStoreConfig( - path=os.path.join(temp_dir, "online_store.db"), - ), - offline_store=BigQueryOfflineStoreConfig( - type="bigquery", dataset=bigquery_dataset - ), - ) - ) - elif provider_type == "gcp": - store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="".join( - random.choices(string.ascii_uppercase + string.digits, k=10) - ), - provider="gcp", - offline_store=BigQueryOfflineStoreConfig( - type="bigquery", dataset=bigquery_dataset - ), - ) - ) - elif provider_type == "gcp_custom_offline_config": - store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="".join( - random.choices(string.ascii_uppercase + string.digits, k=10) - ), - provider="gcp", - offline_store=BigQueryOfflineStoreConfig( - type="bigquery", dataset="foo" - ), - ) - ) - else: - 
raise Exception("Invalid provider used as part of test configuration") - - store.apply([driver, customer, driver_fv, customer_fv]) - - event_timestamp = ( - DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in orders_df.columns - else "e_ts" - ) - expected_df = get_expected_training_df( - customer_df, - customer_fv, - driver_df, - driver_fv, - orders_df, - event_timestamp, - full_feature_names, - ) - - job_from_sql = store.get_historical_features( - entity_df=entity_df_query, - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - ], - full_feature_names=full_feature_names, - ) - - start_time = datetime.utcnow() - actual_df_from_sql_entities = job_from_sql.to_df() - end_time = datetime.utcnow() - with capsys.disabled(): - print( - str( - f"\nTime to execute job_from_sql.to_df() = '{(end_time - start_time)}'" - ) - ) - - assert sorted(expected_df.columns) == sorted( - actual_df_from_sql_entities.columns - ) - assert_frame_equal( - expected_df.sort_values( - by=[event_timestamp, "order_id", "driver_id", "customer_id"] - ).reset_index(drop=True), - actual_df_from_sql_entities[expected_df.columns] - .sort_values(by=[event_timestamp, "order_id", "driver_id", "customer_id"]) - .reset_index(drop=True), - check_dtype=False, - ) - - table_from_sql_entities = job_from_sql.to_arrow() - assert_frame_equal( - actual_df_from_sql_entities, table_from_sql_entities.to_pandas() - ) - - timestamp_column = ( - "e_ts" - if infer_event_timestamp_col - else DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL - ) - - entity_df_query_with_invalid_join_key = ( - f"select order_id, driver_id, customer_id as customer, " - f"order_is_success, {timestamp_column}, FROM {gcp_project}.{table_id}" - ) - # Rename the join key; this should now raise an error. - assertpy.assert_that(store.get_historical_features).raises( - errors.FeastEntityDFMissingColumnsError - ).when_called_with( - entity_df=entity_df_query_with_invalid_join_key, - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - ], - ) - - job_from_df = store.get_historical_features( - entity_df=orders_df, - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - ], - full_feature_names=full_feature_names, - ) - - # Rename the join key; this should now raise an error. 
- orders_df_with_invalid_join_key = orders_df.rename( - {"customer_id": "customer"}, axis="columns" - ) - assertpy.assert_that(store.get_historical_features).raises( - errors.FeastEntityDFMissingColumnsError - ).when_called_with( - entity_df=orders_df_with_invalid_join_key, - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - ], - ) - - # Make sure that custom dataset name is being used from the offline_store config - if provider_type == "gcp_custom_offline_config": - assertpy.assert_that(job_from_df.query).contains("foo.entity_df") - else: - assertpy.assert_that(job_from_df.query).contains( - f"{bigquery_dataset}.entity_df" - ) - - start_time = datetime.utcnow() - actual_df_from_df_entities = job_from_df.to_df() - end_time = datetime.utcnow() - with capsys.disabled(): - print( - str( - f"Time to execute job_from_df.to_df() = '{(end_time - start_time)}'\n" - ) - ) - - assert sorted(expected_df.columns) == sorted(actual_df_from_df_entities.columns) - assert_frame_equal( - expected_df.sort_values( - by=[event_timestamp, "order_id", "driver_id", "customer_id"] - ).reset_index(drop=True), - actual_df_from_df_entities[expected_df.columns] - .sort_values(by=[event_timestamp, "order_id", "driver_id", "customer_id"]) - .reset_index(drop=True), - check_dtype=False, - ) - - table_from_df_entities = job_from_df.to_arrow() - assert_frame_equal( - actual_df_from_df_entities, table_from_df_entities.to_pandas() - ) - - -@pytest.mark.integration -def test_timestamp_bound_inference_from_entity_df_using_bigquery(): - start_date = datetime.now().replace(microsecond=0, second=0, minute=0) - (_, _, _, entity_df, start_date) = generate_entities( - start_date, infer_event_timestamp_col=True - ) - - table_id = f"foo.table_id_{int(time.time_ns())}_{random.randint(1000, 9999)}" - stage_orders_bigquery(entity_df, table_id) - - client = bigquery.Client() - table = client.get_table(table=table_id) - - # Ensure that the table expires after some time - table.expires = datetime.utcnow() + timedelta(minutes=30) - client.update_table(table, ["expires"]) - - min_timestamp, max_timestamp = _get_entity_df_timestamp_bounds( - client, str(table.reference), "e_ts" - ) - - assert min_timestamp.astimezone("UTC") == min(entity_df["e_ts"]).astimezone("UTC") - assert max_timestamp.astimezone("UTC") == max(entity_df["e_ts"]).astimezone("UTC") - - -def test_feature_name_collision_on_historical_retrieval(): - - # _validate_feature_refs is the function that checks for colliding feature names - # check when feature names collide and 'full_feature_names=False' - with pytest.raises(FeatureNameCollisionError) as error: - _validate_feature_refs( - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - "customer_profile:avg_daily_trips", - ], - full_feature_names=False, - ) - - expected_error_message = ( - "Duplicate features named avg_daily_trips found.\n" - "To resolve this collision, either use the full feature name by setting " - "'full_feature_names=True', or ensure that the features in question have different names." 
- ) - - assert str(error.value) == expected_error_message - - # check when feature names collide and 'full_feature_names=True' - with pytest.raises(FeatureNameCollisionError) as error: - _validate_feature_refs( - feature_refs=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "customer_profile:avg_passenger_count", - "customer_profile:lifetime_trip_count", - "customer_profile:avg_daily_trips", - ], - full_feature_names=True, - ) - - expected_error_message = ( - "Duplicate features named driver_stats__avg_daily_trips found.\n" - "To resolve this collision, please ensure that the features in question " - "have different names." - ) - assert str(error.value) == expected_error_message diff --git a/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py new file mode 100644 index 0000000000..dfe14d73f9 --- /dev/null +++ b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py @@ -0,0 +1,53 @@ +from typing import List + +import pytest + +from feast.feature_store import FeastObject +from tests.integration.feature_repos.repo_configuration import ( + IntegrationTestRepoConfig, + construct_test_environment, + construct_universal_feature_views, +) +from tests.integration.feature_repos.universal.entities import customer, driver + +# TODO: Allow integration tests to run using different credentials. + + +@pytest.mark.integration +@pytest.mark.skip( + reason="No way to run this test today. Credentials conflict with real AWS credentials in CI" +) +def test_registration_and_retrieval_from_custom_s3_endpoint(universal_data_sources,): + config = IntegrationTestRepoConfig( + offline_store_creator="tests.integration.feature_repos.universal.data_sources.file.S3FileDataSourceCreator" + ) + import os + + if "AWS_ACCESS_KEY_ID" in os.environ: + raise Exception( + "AWS_ACCESS_KEY_ID has already been set in the environment. Setting it again may cause a conflict. 
" + "It may be better to deduplicate AWS configuration or use sub-processes for isolation" + ) + + os.environ["AWS_ACCESS_KEY_ID"] = "AKIAIOSFODNN7EXAMPLE" + os.environ["AWS_SECRET_ACCESS_KEY"] = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" + + with construct_test_environment(config) as environment: + fs = environment.feature_store + + entities, datasets, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + feast_objects: List[FeastObject] = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer()]) + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + + out = fs.get_online_features( + features=["driver_stats:conv_rate"], entity_rows=[{"driver_id": 5001}] + ).to_dict() + assert out["conv_rate"][0] is not None + + del os.environ["AWS_ACCESS_KEY_ID"] + del os.environ["AWS_SECRET_ACCESS_KEY"] diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py new file mode 100644 index 0000000000..b62f7cda24 --- /dev/null +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -0,0 +1,843 @@ +import random +import time +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +import numpy as np +import pandas as pd +import pytest +from pandas.testing import assert_frame_equal as pd_assert_frame_equal +from pytz import utc + +from feast import utils +from feast.entity import Entity +from feast.errors import ( + FeatureNameCollisionError, + RequestDataNotFoundInEntityDfException, +) +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.offline_utils import ( + DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, +) +from feast.types import Int32 +from feast.value_type import ValueType +from tests.integration.feature_repos.repo_configuration import ( + construct_universal_feature_views, + table_name_from_data_source, +) +from tests.integration.feature_repos.universal.data_sources.snowflake import ( + SnowflakeDataSourceCreator, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + +np.random.seed(0) + + +def convert_timestamp_records_to_utc( + records: List[Dict[str, Any]], column: str +) -> List[Dict[str, Any]]: + for record in records: + record[column] = utils.make_tzaware(record[column]).astimezone(utc) + return records + + +# Find the latest record in the given time range and filter +def find_asof_record( + records: List[Dict[str, Any]], + ts_key: str, + ts_start: datetime, + ts_end: datetime, + filter_keys: Optional[List[str]] = None, + filter_values: Optional[List[Any]] = None, +) -> Dict[str, Any]: + filter_keys = filter_keys or [] + filter_values = filter_values or [] + assert len(filter_keys) == len(filter_values) + found_record: Dict[str, Any] = {} + for record in records: + if ( + all( + [ + record[filter_key] == filter_value + for filter_key, filter_value in zip(filter_keys, filter_values) + ] + ) + and ts_start <= record[ts_key] <= ts_end + ): + if not found_record or found_record[ts_key] < record[ts_key]: + found_record = record + return found_record + + +def get_expected_training_df( + customer_df: pd.DataFrame, + customer_fv: FeatureView, + driver_df: pd.DataFrame, + driver_fv: FeatureView, + orders_df: pd.DataFrame, + order_fv: 
FeatureView, + location_df: pd.DataFrame, + location_fv: FeatureView, + global_df: pd.DataFrame, + global_fv: FeatureView, + field_mapping_df: pd.DataFrame, + field_mapping_fv: FeatureView, + entity_df: pd.DataFrame, + event_timestamp: str, + full_feature_names: bool = False, +): + # Convert all pandas dataframes into records with UTC timestamps + customer_records = convert_timestamp_records_to_utc( + customer_df.to_dict("records"), customer_fv.batch_source.timestamp_field + ) + driver_records = convert_timestamp_records_to_utc( + driver_df.to_dict("records"), driver_fv.batch_source.timestamp_field + ) + order_records = convert_timestamp_records_to_utc( + orders_df.to_dict("records"), event_timestamp + ) + location_records = convert_timestamp_records_to_utc( + location_df.to_dict("records"), location_fv.batch_source.timestamp_field + ) + global_records = convert_timestamp_records_to_utc( + global_df.to_dict("records"), global_fv.batch_source.timestamp_field + ) + field_mapping_records = convert_timestamp_records_to_utc( + field_mapping_df.to_dict("records"), + field_mapping_fv.batch_source.timestamp_field, + ) + entity_rows = convert_timestamp_records_to_utc( + entity_df.to_dict("records"), event_timestamp + ) + + # Manually do point-in-time join of driver, customer, and order records against + # the entity df + for entity_row in entity_rows: + customer_record = find_asof_record( + customer_records, + ts_key=customer_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] - customer_fv.ttl, + ts_end=entity_row[event_timestamp], + filter_keys=["customer_id"], + filter_values=[entity_row["customer_id"]], + ) + driver_record = find_asof_record( + driver_records, + ts_key=driver_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] - driver_fv.ttl, + ts_end=entity_row[event_timestamp], + filter_keys=["driver_id"], + filter_values=[entity_row["driver_id"]], + ) + order_record = find_asof_record( + order_records, + ts_key=customer_fv.batch_source.timestamp_field, + ts_start=entity_row[event_timestamp] - order_fv.ttl, + ts_end=entity_row[event_timestamp], + filter_keys=["customer_id", "driver_id"], + filter_values=[entity_row["customer_id"], entity_row["driver_id"]], + ) + origin_record = find_asof_record( + location_records, + ts_key=location_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] - location_fv.ttl, + ts_end=order_record[event_timestamp], + filter_keys=["location_id"], + filter_values=[order_record["origin_id"]], + ) + destination_record = find_asof_record( + location_records, + ts_key=location_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] - location_fv.ttl, + ts_end=order_record[event_timestamp], + filter_keys=["location_id"], + filter_values=[order_record["destination_id"]], + ) + global_record = find_asof_record( + global_records, + ts_key=global_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] - global_fv.ttl, + ts_end=order_record[event_timestamp], + ) + + field_mapping_record = find_asof_record( + field_mapping_records, + ts_key=field_mapping_fv.batch_source.timestamp_field, + ts_start=order_record[event_timestamp] - field_mapping_fv.ttl, + ts_end=order_record[event_timestamp], + ) + + entity_row.update( + { + ( + f"customer_profile__{k}" if full_feature_names else k + ): customer_record.get(k, None) + for k in ( + "current_balance", + "avg_passenger_count", + "lifetime_trip_count", + ) + } + ) + entity_row.update( + { + (f"driver_stats__{k}" if full_feature_names else 
k): driver_record.get( + k, None + ) + for k in ("conv_rate", "avg_daily_trips") + } + ) + entity_row.update( + { + (f"order__{k}" if full_feature_names else k): order_record.get(k, None) + for k in ("order_is_success",) + } + ) + entity_row.update( + { + "origin__temperature": origin_record.get("temperature", None), + "destination__temperature": destination_record.get("temperature", None), + } + ) + entity_row.update( + { + (f"global_stats__{k}" if full_feature_names else k): global_record.get( + k, None + ) + for k in ("num_rides", "avg_ride_length",) + } + ) + + # get field_mapping_record by column name, but label by feature name + entity_row.update( + { + ( + f"field_mapping__{feature}" if full_feature_names else feature + ): field_mapping_record.get(column, None) + for ( + column, + feature, + ) in field_mapping_fv.batch_source.field_mapping.items() + } + ) + + # Convert records back to pandas dataframe + expected_df = pd.DataFrame(entity_rows) + + # Move "event_timestamp" column to front + current_cols = expected_df.columns.tolist() + current_cols.remove(event_timestamp) + expected_df = expected_df[[event_timestamp] + current_cols] + + # Cast some columns to expected types, since we lose information when converting pandas DFs into Python objects. + if full_feature_names: + expected_column_types = { + "order__order_is_success": "int32", + "driver_stats__conv_rate": "float32", + "customer_profile__current_balance": "float32", + "customer_profile__avg_passenger_count": "float32", + "global_stats__avg_ride_length": "float32", + "field_mapping__feature_name": "int32", + } + else: + expected_column_types = { + "order_is_success": "int32", + "conv_rate": "float32", + "current_balance": "float32", + "avg_passenger_count": "float32", + "avg_ride_length": "float32", + "feature_name": "int32", + } + + for col, typ in expected_column_types.items(): + expected_df[col] = expected_df[col].astype(typ) + + conv_feature_name = "driver_stats__conv_rate" if full_feature_names else "conv_rate" + conv_plus_feature_name = response_feature_name( + "conv_rate_plus_100", full_feature_names + ) + expected_df[conv_plus_feature_name] = expected_df[conv_feature_name] + 100 + expected_df[ + response_feature_name("conv_rate_plus_100_rounded", full_feature_names) + ] = ( + expected_df[conv_plus_feature_name] + .astype("float") + .round() + .astype(pd.Int32Dtype()) + ) + if "val_to_add" in expected_df.columns: + expected_df[ + response_feature_name("conv_rate_plus_val_to_add", full_feature_names) + ] = (expected_df[conv_feature_name] + expected_df["val_to_add"]) + + return expected_df + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features(environment, universal_data_sources, full_feature_names): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + + feature_views = construct_universal_feature_views(data_sources) + + entity_df_with_request_data = datasets.entity_df.copy(deep=True) + entity_df_with_request_data["val_to_add"] = [ + i for i in range(len(entity_df_with_request_data)) + ] + entity_df_with_request_data["driver_age"] = [ + i + 100 for i in range(len(entity_df_with_request_data)) + ] + + feature_service = FeatureService( + name="convrate_plus100", + features=[feature_views.driver[["conv_rate"]], feature_views.driver_odfv], + ) + feature_service_entity_mapping = FeatureService( + name="entity_mapping", + features=[ + 
feature_views.location.with_name("origin").with_join_key_map( + {"location_id": "origin_id"} + ), + feature_views.location.with_name("destination").with_join_key_map( + {"location_id": "destination_id"} + ), + ], + ) + + store.apply( + [ + driver(), + customer(), + location(), + feature_service, + feature_service_entity_mapping, + *feature_views.values(), + ] + ) + + event_timestamp = ( + DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL + if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in datasets.orders_df.columns + else "e_ts" + ) + full_expected_df = get_expected_training_df( + datasets.customer_df, + feature_views.customer, + datasets.driver_df, + feature_views.driver, + datasets.orders_df, + feature_views.order, + datasets.location_df, + feature_views.location, + datasets.global_df, + feature_views.global_fv, + datasets.field_mapping_df, + feature_views.field_mapping, + entity_df_with_request_data, + event_timestamp, + full_feature_names, + ) + + # Only need the shadow entities features in the FeatureService test + expected_df = full_expected_df.drop( + columns=["origin__temperature", "destination__temperature"], + ) + + job_from_df = store.get_historical_features( + entity_df=entity_df_with_request_data, + features=[ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_100_rounded", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + "field_mapping:feature_name", + ], + full_feature_names=full_feature_names, + ) + + start_time = datetime.utcnow() + actual_df_from_df_entities = job_from_df.to_df() + + print(f"actual_df_from_df_entities shape: {actual_df_from_df_entities.shape}") + end_time = datetime.utcnow() + print(str(f"Time to execute job_from_df.to_df() = '{(end_time - start_time)}'\n")) + + assert sorted(expected_df.columns) == sorted(actual_df_from_df_entities.columns) + assert_frame_equal( + expected_df, + actual_df_from_df_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + ) + + assert_feature_service_correctness( + store, + feature_service, + full_feature_names, + entity_df_with_request_data, + expected_df, + event_timestamp, + ) + assert_feature_service_entity_mapping_correctness( + store, + feature_service_entity_mapping, + full_feature_names, + entity_df_with_request_data, + full_expected_df, + event_timestamp, + ) + table_from_df_entities: pd.DataFrame = job_from_df.to_arrow().to_pandas() + + assert_frame_equal( + expected_df, + table_from_df_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + ) + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features_with_missing_request_data( + environment, universal_data_sources, full_feature_names +): + store = environment.feature_store + + (_, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + store.apply([driver(), customer(), location(), *feature_views.values()]) + + # If request data is missing that's needed for on demand transform, throw an error + with pytest.raises(RequestDataNotFoundInEntityDfException): + store.get_historical_features( + entity_df=datasets.entity_df, + features=[ + "customer_profile:current_balance", + 
"customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + "global_stats:num_rides", + "global_stats:avg_ride_length", + "field_mapping:feature_name", + ], + full_feature_names=full_feature_names, + ) + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features_with_entities_from_query( + environment, universal_data_sources, full_feature_names +): + store = environment.feature_store + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + orders_table = table_name_from_data_source(data_sources.orders) + if not orders_table: + raise pytest.skip("Offline source is not sql-based") + + data_source_creator = environment.test_repo_config.offline_store_creator + if data_source_creator.__name__ == SnowflakeDataSourceCreator.__name__: + entity_df_query = f""" + SELECT "customer_id", "driver_id", "order_id", "origin_id", "destination_id", "event_timestamp" + FROM "{orders_table}" + """ + else: + entity_df_query = f""" + SELECT customer_id, driver_id, order_id, origin_id, destination_id, event_timestamp + FROM {orders_table} + """ + + store.apply([driver(), customer(), location(), *feature_views.values()]) + + job_from_sql = store.get_historical_features( + entity_df=entity_df_query, + features=[ + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + "field_mapping:feature_name", + ], + full_feature_names=full_feature_names, + ) + + start_time = datetime.utcnow() + actual_df_from_sql_entities = job_from_sql.to_df() + end_time = datetime.utcnow() + print(str(f"\nTime to execute job_from_sql.to_df() = '{(end_time - start_time)}'")) + + event_timestamp = ( + DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL + if DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL in datasets.orders_df.columns + else "e_ts" + ) + full_expected_df = get_expected_training_df( + datasets.customer_df, + feature_views.customer, + datasets.driver_df, + feature_views.driver, + datasets.orders_df, + feature_views.order, + datasets.location_df, + feature_views.location, + datasets.global_df, + feature_views.global_fv, + datasets.field_mapping_df, + feature_views.field_mapping, + datasets.entity_df, + event_timestamp, + full_feature_names, + ) + + # Not requesting the on demand transform with an entity_df query (can't add request data in them) + expected_df_query = full_expected_df.drop( + columns=[ + response_feature_name("conv_rate_plus_100", full_feature_names), + response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + response_feature_name("avg_daily_trips", full_feature_names), + response_feature_name("conv_rate", full_feature_names), + "origin__temperature", + "destination__temperature", + ] + ) + assert_frame_equal( + expected_df_query, + actual_df_from_sql_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + ) + + table_from_sql_entities = job_from_sql.to_arrow().to_pandas() + for col in table_from_sql_entities.columns: + expected_df_query[col] = expected_df_query[col].astype( + table_from_sql_entities[col].dtype + ) + + assert_frame_equal( + expected_df_query, + table_from_sql_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + 
) + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_historical_features_persisting( + environment, universal_data_sources, full_feature_names +): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + store.apply([driver(), customer(), location(), *feature_views.values()]) + + entity_df = datasets.entity_df.drop( + columns=["order_id", "origin_id", "destination_id"] + ) + + job = store.get_historical_features( + entity_df=entity_df, + features=[ + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + "field_mapping:feature_name", + ], + full_feature_names=full_feature_names, + ) + + saved_dataset = store.create_saved_dataset( + from_=job, + name="saved_dataset", + storage=environment.data_source_creator.create_saved_dataset_destination(), + tags={"env": "test"}, + ) + + event_timestamp = DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL + expected_df = get_expected_training_df( + datasets.customer_df, + feature_views.customer, + datasets.driver_df, + feature_views.driver, + datasets.orders_df, + feature_views.order, + datasets.location_df, + feature_views.location, + datasets.global_df, + feature_views.global_fv, + datasets.field_mapping_df, + feature_views.field_mapping, + entity_df, + event_timestamp, + full_feature_names, + ).drop( + columns=[ + response_feature_name("conv_rate_plus_100", full_feature_names), + response_feature_name("conv_rate_plus_100_rounded", full_feature_names), + response_feature_name("avg_daily_trips", full_feature_names), + response_feature_name("conv_rate", full_feature_names), + "origin__temperature", + "destination__temperature", + ] + ) + + assert_frame_equal( + expected_df, + saved_dataset.to_df(), + keys=[event_timestamp, "driver_id", "customer_id"], + ) + + assert_frame_equal( + job.to_df(), + saved_dataset.to_df(), + keys=[event_timestamp, "driver_id", "customer_id"], + ) + + +@pytest.mark.integration +@pytest.mark.universal +def test_historical_features_from_bigquery_sources_containing_backfills(environment): + store = environment.feature_store + + now = datetime.now().replace(microsecond=0, second=0, minute=0) + tomorrow = now + timedelta(days=1) + day_after_tomorrow = now + timedelta(days=2) + + entity_df = pd.DataFrame( + data=[ + {"driver_id": 1001, "event_timestamp": day_after_tomorrow}, + {"driver_id": 1002, "event_timestamp": day_after_tomorrow}, + ] + ) + + driver_stats_df = pd.DataFrame( + data=[ + # Duplicated rows simple case + { + "driver_id": 1001, + "avg_daily_trips": 10, + "event_timestamp": now, + "created": now, + }, + { + "driver_id": 1001, + "avg_daily_trips": 20, + "event_timestamp": now, + "created": tomorrow, + }, + # Duplicated rows after a backfill + { + "driver_id": 1002, + "avg_daily_trips": 30, + "event_timestamp": now, + "created": tomorrow, + }, + { + "driver_id": 1002, + "avg_daily_trips": 40, + "event_timestamp": tomorrow, + "created": now, + }, + ] + ) + + expected_df = pd.DataFrame( + data=[ + { + "driver_id": 1001, + "event_timestamp": day_after_tomorrow, + "avg_daily_trips": 20, + }, + { + "driver_id": 1002, + "event_timestamp": day_after_tomorrow, + "avg_daily_trips": 40, + }, + ] + ) + + driver_stats_data_source = environment.data_source_creator.create_data_source( 
+ df=driver_stats_df, + destination_name=f"test_driver_stats_{int(time.time_ns())}_{random.randint(1000, 9999)}", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + + driver = Entity(name="driver", join_keys=["driver_id"], value_type=ValueType.INT64) + driver_fv = FeatureView( + name="driver_stats", + entities=["driver"], + schema=[Field(name="avg_daily_trips", dtype=Int32)], + batch_source=driver_stats_data_source, + ttl=None, + ) + + store.apply([driver, driver_fv]) + + offline_job = store.get_historical_features( + entity_df=entity_df, + features=["driver_stats:avg_daily_trips"], + full_feature_names=False, + ) + + start_time = datetime.utcnow() + actual_df = offline_job.to_df() + + print(f"actual_df shape: {actual_df.shape}") + end_time = datetime.utcnow() + print(str(f"Time to execute job_from_df.to_df() = '{(end_time - start_time)}'\n")) + + assert sorted(expected_df.columns) == sorted(actual_df.columns) + assert_frame_equal(expected_df, actual_df, keys=["driver_id"]) + + +def response_feature_name(feature: str, full_feature_names: bool) -> str: + if feature in {"conv_rate", "avg_daily_trips"} and full_feature_names: + return f"driver_stats__{feature}" + + if ( + feature + in { + "conv_rate_plus_100", + "conv_rate_plus_100_rounded", + "conv_rate_plus_val_to_add", + } + and full_feature_names + ): + return f"conv_rate_plus_100__{feature}" + + return feature + + +def assert_feature_service_correctness( + store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp +): + + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + full_feature_names=full_feature_names, + ) + + actual_df_from_df_entities = job_from_df.to_df() + + expected_df = expected_df[ + [ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + response_feature_name("conv_rate", full_feature_names), + response_feature_name("conv_rate_plus_100", full_feature_names), + "driver_age", + ] + ] + + assert_frame_equal( + expected_df, + actual_df_from_df_entities, + keys=[event_timestamp, "order_id", "driver_id", "customer_id"], + ) + + +def assert_feature_service_entity_mapping_correctness( + store, feature_service, full_feature_names, entity_df, expected_df, event_timestamp +): + if full_feature_names: + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + full_feature_names=full_feature_names, + ) + actual_df_from_df_entities = job_from_df.to_df() + + expected_df: pd.DataFrame = ( + expected_df.sort_values( + by=[ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + ] + ) + .drop_duplicates() + .reset_index(drop=True) + ) + expected_df = expected_df[ + [ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + "origin__temperature", + "destination__temperature", + ] + ] + + assert_frame_equal( + expected_df, + actual_df_from_df_entities, + keys=[ + event_timestamp, + "order_id", + "driver_id", + "customer_id", + "origin_id", + "destination_id", + ], + ) + else: + # using 2 of the same FeatureView without full_feature_names=True will result in collision + with pytest.raises(FeatureNameCollisionError): + job_from_df = store.get_historical_features( + entity_df=entity_df, + features=feature_service, + full_feature_names=full_feature_names, + ) + + +def assert_frame_equal(expected_df, actual_df, keys): + expected_df: pd.DataFrame = ( + 
expected_df.sort_values(by=keys).drop_duplicates().reset_index(drop=True) + ) + + actual_df = ( + actual_df[expected_df.columns] + .sort_values(by=keys) + .drop_duplicates() + .reset_index(drop=True) + ) + + pd_assert_frame_equal( + expected_df, actual_df, check_dtype=False, + ) diff --git a/sdk/python/tests/integration/online_store/test_e2e_local.py b/sdk/python/tests/integration/online_store/test_e2e_local.py index 58e32ad26c..c1aa10900a 100644 --- a/sdk/python/tests/integration/online_store/test_e2e_local.py +++ b/sdk/python/tests/integration/online_store/test_e2e_local.py @@ -12,12 +12,13 @@ def _get_last_feature_row(df: pd.DataFrame, driver_id, max_date: datetime): - """ Manually extract last feature value from a dataframe for a given driver_id with up to `max_date` date """ + """Manually extract last feature value from a dataframe for a given driver_id with up to `max_date` date""" filtered = df[ - (df["driver_id"] == driver_id) & (df["datetime"] < max_date.replace(tzinfo=utc)) + (df["driver_id"] == driver_id) + & (df["event_timestamp"] < max_date.replace(tzinfo=utc)) ] - max_ts = filtered.loc[filtered["datetime"].idxmax()]["datetime"] - filtered_by_ts = filtered[filtered["datetime"] == max_ts] + max_ts = filtered.loc[filtered["event_timestamp"].idxmax()]["event_timestamp"] + filtered_by_ts = filtered[filtered["event_timestamp"] == max_ts] return filtered_by_ts.loc[filtered_by_ts["created"].idxmax()] @@ -26,25 +27,42 @@ def _assert_online_features( ): """Assert that features in online store are up to date with `max_date` date.""" # Read features back - result = store.get_online_features( - feature_refs=[ + response = store.get_online_features( + features=[ "driver_hourly_stats:conv_rate", "driver_hourly_stats:avg_daily_trips", + "global_daily_stats:num_rides", + "global_daily_stats:avg_ride_length", ], entity_rows=[{"driver_id": 1001}], full_feature_names=True, ) - assert "driver_hourly_stats__avg_daily_trips" in result.to_dict() + # Float features should still be floats from the online store... + assert ( + response.proto.results[ + list(response.proto.metadata.feature_names.val).index( + "driver_hourly_stats__conv_rate" + ) + ] + .values[0] + .float_val + > 0 + ) - assert "driver_hourly_stats__conv_rate" in result.to_dict() + result = response.to_dict() + assert len(result) == 5 + assert "driver_hourly_stats__avg_daily_trips" in result + assert "driver_hourly_stats__conv_rate" in result assert ( abs( - result.to_dict()["driver_hourly_stats__conv_rate"][0] + result["driver_hourly_stats__conv_rate"][0] - _get_last_feature_row(driver_df, 1001, max_date)["conv_rate"] ) < 0.01 ) + assert "global_daily_stats__num_rides" in result + assert "global_daily_stats__avg_ride_length" in result def test_e2e_local() -> None: @@ -68,16 +86,19 @@ def test_e2e_local() -> None: driver_df = driver_data.create_driver_hourly_stats_df( driver_entities, start_date, end_date ) - driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) + global_df = driver_data.create_global_daily_stats_df(start_date, end_date) + global_stats_path = os.path.join(data_dir, "global_stats.parquet") + global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) + # Note that runner takes care of running apply/teardown for us here. # We patch python code in example_feature_repo_2.py to set the path to Parquet files. 
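+    # (The example repo file is assumed to contain literal %PARQUET_PATH% and
+    # %PARQUET_PATH_GLOBAL% placeholder tokens, which are substituted with the
+    # temporary parquet paths generated above before the repo is applied.)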
with runner.local_repo( - get_example_repo("example_feature_repo_2.py").replace( - "%PARQUET_PATH%", driver_stats_path - ), + get_example_repo("example_feature_repo_2.py") + .replace("%PARQUET_PATH%", driver_stats_path) + .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), "file", ) as store: diff --git a/sdk/python/tests/integration/online_store/test_feature_service_read.py b/sdk/python/tests/integration/online_store/test_feature_service_read.py new file mode 100644 index 0000000000..33c318b9ed --- /dev/null +++ b/sdk/python/tests/integration/online_store/test_feature_service_read.py @@ -0,0 +1,22 @@ +import pytest + +from tests.utils.cli_utils import CliRunner, get_example_repo +from tests.utils.online_read_write_test import basic_rw_test + + +@pytest.mark.integration +def test_feature_service_read() -> None: + """ + Read feature values from the FeatureStore using a FeatureService. + """ + + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_1.py"), "bigquery" + ) as store: + + basic_rw_test( + store, + view_name="driver_locations", + feature_service_name="driver_locations_service", + ) diff --git a/sdk/python/tests/integration/online_store/test_online_retrieval.py b/sdk/python/tests/integration/online_store/test_online_retrieval.py index c981552fdf..9cf4d9a182 100644 --- a/sdk/python/tests/integration/online_store/test_online_retrieval.py +++ b/sdk/python/tests/integration/online_store/test_online_retrieval.py @@ -34,7 +34,7 @@ def test_online() -> None: provider = store._get_provider() driver_key = EntityKeyProto( - join_keys=["driver"], entity_values=[ValueProto(int64_val=1)] + join_keys=["driver_id"], entity_values=[ValueProto(int64_val=1)] ) provider.online_write_batch( config=store.config, @@ -54,7 +54,7 @@ def test_online() -> None: ) customer_key = EntityKeyProto( - join_keys=["customer"], entity_values=[ValueProto(int64_val=5)] + join_keys=["customer_id"], entity_values=[ValueProto(string_val="5")] ) provider.online_write_batch( config=store.config, @@ -75,8 +75,8 @@ def test_online() -> None: ) customer_key = EntityKeyProto( - join_keys=["customer", "driver"], - entity_values=[ValueProto(int64_val=5), ValueProto(int64_val=1)], + join_keys=["customer_id", "driver_id"], + entity_values=[ValueProto(string_val="5"), ValueProto(int64_val=1)], ) provider.online_write_batch( config=store.config, @@ -94,21 +94,24 @@ def test_online() -> None: # Retrieve two features using two keys, one valid one non-existing result = store.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}, {"driver": 1, "customer": 5}], + entity_rows=[ + {"driver_id": 1, "customer_id": "5"}, + {"driver_id": 1, "customer_id": 5}, + ], full_feature_names=False, ).to_dict() assert "lon" in result assert "avg_orders_day" in result assert "name" in result - assert result["driver"] == [1, 1] - assert result["customer"] == [5, 5] + assert result["driver_id"] == [1, 1] + assert result["customer_id"] == ["5", "5"] assert result["lon"] == ["1.0", "1.0"] assert result["avg_orders_day"] == [1.0, 1.0] assert result["name"] == ["John", "John"] @@ -116,8 +119,8 @@ def test_online() -> None: # Ensure features are still in result when keys not found result = store.get_online_features( - feature_refs=["customer_driver_combined:trips"], - entity_rows=[{"driver": 0, "customer": 0}], + features=["customer_driver_combined:trips"], + 
entity_rows=[{"driver_id": 0, "customer_id": 0}], full_feature_names=False, ).to_dict() @@ -126,8 +129,8 @@ def test_online() -> None: # invalid table reference with pytest.raises(FeatureViewNotFoundException): store.get_online_features( - feature_refs=["driver_locations_bad:lon"], - entity_rows=[{"driver": 1}], + features=["driver_locations_bad:lon"], + entity_rows=[{"driver_id": 1}], full_feature_names=False, ) @@ -146,13 +149,13 @@ def test_online() -> None: # Should download the registry and cache it permanently (or until manually refreshed) result = fs_fast_ttl.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}], + entity_rows=[{"driver_id": 1, "customer_id": 5}], full_feature_names=False, ).to_dict() assert result["lon"] == ["1.0"] @@ -167,13 +170,13 @@ def test_online() -> None: # Will try to reload registry because it has expired (it will fail because we deleted the actual registry file) with pytest.raises(FileNotFoundError): fs_fast_ttl.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}], + entity_rows=[{"driver_id": 1, "customer_id": 5}], full_feature_names=False, ).to_dict() @@ -182,13 +185,13 @@ def test_online() -> None: # Test if registry is actually reloaded and whether results return result = fs_fast_ttl.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}], + entity_rows=[{"driver_id": 1, "customer_id": 5}], full_feature_names=False, ).to_dict() assert result["lon"] == ["1.0"] @@ -208,13 +211,13 @@ def test_online() -> None: # Should return results (and fill the registry cache) result = fs_infinite_ttl.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}], + entity_rows=[{"driver_id": 1, "customer_id": 5}], full_feature_names=False, ).to_dict() assert result["lon"] == ["1.0"] @@ -228,13 +231,13 @@ def test_online() -> None: # TTL is infinite so this method should use registry cache result = fs_infinite_ttl.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "customer_profile:avg_orders_day", "customer_profile:name", "customer_driver_combined:trips", ], - entity_rows=[{"driver": 1, "customer": 5}], + entity_rows=[{"driver_id": 1, "customer_id": 5}], full_feature_names=False, ).to_dict() assert result["lon"] == ["1.0"] @@ -284,7 +287,7 @@ def test_online_to_df(): 3 3.0 0.3 """ driver_key = EntityKeyProto( - join_keys=["driver"], entity_values=[ValueProto(int64_val=d)] + join_keys=["driver_id"], entity_values=[ValueProto(int64_val=d)] ) provider.online_write_batch( config=store.config, @@ -311,7 +314,7 @@ def test_online_to_df(): 6 6.0 foo6 60 """ customer_key = EntityKeyProto( - join_keys=["customer"], entity_values=[ValueProto(int64_val=c)] + join_keys=["customer_id"], entity_values=[ValueProto(string_val=str(c))] ) provider.online_write_batch( config=store.config, @@ -340,8 +343,8 @@ def test_online_to_df(): 6 3 18 """ combo_keys = EntityKeyProto( - join_keys=["customer", "driver"], - 
entity_values=[ValueProto(int64_val=c), ValueProto(int64_val=d)], + join_keys=["customer_id", "driver_id"], + entity_values=[ValueProto(string_val=str(c)), ValueProto(int64_val=d)], ) provider.online_write_batch( config=store.config, @@ -359,7 +362,7 @@ def test_online_to_df(): # Get online features in dataframe result_df = store.get_online_features( - feature_refs=[ + features=[ "driver_locations:lon", "driver_locations:lat", "customer_profile:avg_orders_day", @@ -369,7 +372,7 @@ def test_online_to_df(): ], # Reverse the row order entity_rows=[ - {"driver": d, "customer": c} + {"driver_id": d, "customer_id": c} for (d, c) in zip(reversed(driver_ids), reversed(customer_ids)) ], ).to_df() @@ -381,8 +384,8 @@ def test_online_to_df(): 1 4 1.0 0.1 4.0 foo4 40 4 """ df_dict = { - "driver": driver_ids, - "customer": customer_ids, + "driver_id": driver_ids, + "customer_id": [str(c) for c in customer_ids], "lon": [str(d * lon_multiply) for d in driver_ids], "lat": [d * lat_multiply for d in driver_ids], "avg_orders_day": [c * avg_order_day_multiply for c in customer_ids], @@ -392,8 +395,8 @@ def test_online_to_df(): } # Requested column order ordered_column = [ - "driver", - "customer", + "driver_id", + "customer_id", "lon", "lat", "avg_orders_day", diff --git a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py new file mode 100644 index 0000000000..9e9ec953c7 --- /dev/null +++ b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py @@ -0,0 +1,41 @@ +import datetime + +import pandas as pd +import pytest + +from tests.integration.feature_repos.repo_configuration import ( + construct_universal_feature_views, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + + +@pytest.mark.integration +@pytest.mark.universal +def test_push_features_and_read(environment, universal_data_sources): + store = environment.feature_store + + (_, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + store.apply([driver(), customer(), location(), *feature_views.values()]) + data = { + "location_id": [1], + "temperature": [4], + "event_timestamp": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], + "created": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")], + } + df_ingest = pd.DataFrame(data) + + store.push("location_stats_push_source", df_ingest) + + online_resp = store.get_online_features( + features=["pushable_location_stats:temperature"], + entity_rows=[{"location_id": 1}], + ) + online_resp_dict = online_resp.to_dict() + assert online_resp_dict["location_id"] == [1] + assert online_resp_dict["temperature"] == [4] diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py new file mode 100644 index 0000000000..f4440dbfbc --- /dev/null +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -0,0 +1,896 @@ +import datetime +import itertools +import os +import time +import unittest +from datetime import timedelta +from typing import Any, Dict, List, Tuple, Union + +import assertpy +import numpy as np +import pandas as pd +import pytest +import requests +from botocore.exceptions import BotoCoreError + +from feast import Entity, FeatureService, FeatureView, Field, ValueType +from feast.errors import ( + FeatureNameCollisionError, + RequestDataNotFoundInEntityRowsException, 
+)
+from feast.online_response import TIMESTAMP_POSTFIX
+from feast.types import String
+from feast.wait import wait_retry_backoff
+from tests.integration.feature_repos.repo_configuration import (
+    Environment,
+    construct_universal_feature_views,
+)
+from tests.integration.feature_repos.universal.entities import (
+    customer,
+    driver,
+    location,
+)
+from tests.integration.feature_repos.universal.feature_views import (
+    create_driver_hourly_stats_feature_view,
+    driver_feature_view,
+)
+from tests.utils.data_source_utils import prep_file_source
+
+
+@pytest.mark.integration
+def test_entity_ttl_online_store(local_redis_environment, redis_universal_data_sources):
+    if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True":
+        return
+    fs = local_redis_environment.feature_store
+    # set the key TTL in the online store to 1 second
+    fs.config.online_store.key_ttl_seconds = 1
+    entities, datasets, data_sources = redis_universal_data_sources
+    driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver)
+    driver_entity = driver()
+
+    # Register Feature View and Entity
+    fs.apply([driver_hourly_stats, driver_entity])
+
+    # fake data to ingest into Online Store
+    data = {
+        "driver_id": [1],
+        "conv_rate": [0.5],
+        "acc_rate": [0.6],
+        "avg_daily_trips": [4],
+        "event_timestamp": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")],
+        "created": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")],
+    }
+    df_ingest = pd.DataFrame(data)
+
+    # directly ingest data into the Online Store
+    fs.write_to_online_store("driver_stats", df_ingest)
+
+    # assert the right data is in the Online Store
+    df = fs.get_online_features(
+        features=[
+            "driver_stats:avg_daily_trips",
+            "driver_stats:acc_rate",
+            "driver_stats:conv_rate",
+        ],
+        entity_rows=[{"driver_id": 1}],
+    ).to_df()
+    assertpy.assert_that(df["avg_daily_trips"].iloc[0]).is_equal_to(4)
+    assertpy.assert_that(df["acc_rate"].iloc[0]).is_close_to(0.6, 1e-6)
+    assertpy.assert_that(df["conv_rate"].iloc[0]).is_close_to(0.5, 1e-6)
+
+    # simulate time passing for testing ttl
+    time.sleep(1)
+
+    # retrieve the same entity again
+    df = fs.get_online_features(
+        features=[
+            "driver_stats:avg_daily_trips",
+            "driver_stats:acc_rate",
+            "driver_stats:conv_rate",
+        ],
+        entity_rows=[{"driver_id": 1}],
+    ).to_df()
+    # assert that the entity features expired in the online store
+    assertpy.assert_that(df["avg_daily_trips"].iloc[0]).is_none()
+    assertpy.assert_that(df["acc_rate"].iloc[0]).is_none()
+    assertpy.assert_that(df["conv_rate"].iloc[0]).is_none()
+
+
+# TODO: make this work with all universal (all online store types)
+@pytest.mark.integration
+def test_write_to_online_store_event_check(local_redis_environment):
+    if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True":
+        return
+    fs = local_redis_environment.feature_store
+
+    # write the same data points 3 times with different timestamps
+    now = pd.Timestamp(datetime.datetime.utcnow()).round("ms")
+    hour_ago = pd.Timestamp(datetime.datetime.utcnow() - timedelta(hours=1)).round("ms")
+    latest = pd.Timestamp(datetime.datetime.utcnow() + timedelta(seconds=1)).round("ms")
+
+    data = {
+        "id": [123, 567, 890],
+        "string_col": ["OLD_FEATURE", "LATEST_VALUE2", "LATEST_VALUE3"],
+        "ts_1": [hour_ago, now, now],
+    }
+    dataframe_source = pd.DataFrame(data)
+    with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source:
+        e = Entity(name="id", value_type=ValueType.STRING)
+
+        # Create Feature View
+        fv1 = FeatureView(
+            name="feature_view_123",
+            schema=[Field(name="string_col",
dtype=String)],
+            entities=["id"],
+            batch_source=file_source,
+            ttl=timedelta(minutes=5),
+        )
+        # Register Feature View and Entity
+        fs.apply([fv1, e])
+
+        # data to ingest into Online Store (recent)
+        data = {
+            "id": [123],
+            "string_col": ["hi_123"],
+            "ts_1": [now],
+        }
+        df_data = pd.DataFrame(data)
+
+        # directly ingest data into the Online Store
+        fs.write_to_online_store("feature_view_123", df_data)
+
+        df = fs.get_online_features(
+            features=["feature_view_123:string_col"], entity_rows=[{"id": 123}]
+        ).to_df()
+        assert df["string_col"].iloc[0] == "hi_123"
+
+        # data to ingest into Online Store (1 hour delayed data)
+        # should NOT overwrite the features for id=123 because it's less recent data
+        data = {
+            "id": [123, 567, 890],
+            "string_col": ["bye_321", "hello_123", "greetings_321"],
+            "ts_1": [hour_ago, hour_ago, hour_ago],
+        }
+        df_data = pd.DataFrame(data)
+
+        # directly ingest data into the Online Store
+        fs.write_to_online_store("feature_view_123", df_data)
+
+        df = fs.get_online_features(
+            features=["feature_view_123:string_col"],
+            entity_rows=[{"id": 123}, {"id": 567}, {"id": 890}],
+        ).to_df()
+        assert df["string_col"].iloc[0] == "hi_123"
+        assert df["string_col"].iloc[1] == "hello_123"
+        assert df["string_col"].iloc[2] == "greetings_321"
+
+        # should overwrite string_col for id=123 because it's most recent based on event_timestamp
+        data = {
+            "id": [123],
+            "string_col": ["LATEST_VALUE"],
+            "ts_1": [latest],
+        }
+        df_data = pd.DataFrame(data)
+
+        fs.write_to_online_store("feature_view_123", df_data)
+
+        df = fs.get_online_features(
+            features=["feature_view_123:string_col"],
+            entity_rows=[{"id": 123}, {"id": 567}, {"id": 890}],
+        ).to_df()
+        assert df["string_col"].iloc[0] == "LATEST_VALUE"
+        assert df["string_col"].iloc[1] == "hello_123"
+        assert df["string_col"].iloc[2] == "greetings_321"
+
+        # write to the online store by materializing from the data source (dataframe_source)
+        fs.materialize(
+            start_date=datetime.datetime.now() - timedelta(hours=12),
+            end_date=datetime.datetime.utcnow(),
+        )
+
+        df = fs.get_online_features(
+            features=["feature_view_123:string_col"],
+            entity_rows=[{"id": 123}, {"id": 567}, {"id": 890}],
+        ).to_df()
+        assert df["string_col"].iloc[0] == "LATEST_VALUE"
+        assert df["string_col"].iloc[1] == "LATEST_VALUE2"
+        assert df["string_col"].iloc[2] == "LATEST_VALUE3"
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+def test_write_to_online_store(environment, universal_data_sources):
+    fs = environment.feature_store
+    entities, datasets, data_sources = universal_data_sources
+    driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver)
+    driver_entity = driver()
+
+    # Register Feature View and Entity
+    fs.apply([driver_hourly_stats, driver_entity])
+
+    # fake data to ingest into Online Store
+    data = {
+        "driver_id": [123],
+        "conv_rate": [0.85],
+        "acc_rate": [0.91],
+        "avg_daily_trips": [14],
+        "event_timestamp": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")],
+        "created": [pd.Timestamp(datetime.datetime.utcnow()).round("ms")],
+    }
+    df_data = pd.DataFrame(data)
+
+    # directly ingest data into the Online Store
+    fs.write_to_online_store("driver_stats", df_data)
+
+    # assert the right data is in the Online Store
+    df = fs.get_online_features(
+        features=[
+            "driver_stats:avg_daily_trips",
+            "driver_stats:acc_rate",
+            "driver_stats:conv_rate",
+        ],
+        entity_rows=[{"driver_id": 123}],
+    ).to_df()
+    assertpy.assert_that(df["avg_daily_trips"].iloc[0]).is_equal_to(14)
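+    # Float feature values round-trip through protobuf floats, so they are
+    # compared with a small tolerance instead of exact equality.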
+    assertpy.assert_that(df["acc_rate"].iloc[0]).is_close_to(0.91, 1e-6)
+    assertpy.assert_that(df["conv_rate"].iloc[0]).is_close_to(0.85, 1e-6)
+
+
+def _get_online_features_dict_remotely(
+    endpoint: str,
+    features: Union[List[str], FeatureService],
+    entity_rows: List[Dict[str, Any]],
+    full_feature_names: bool = False,
+) -> Dict[str, List[Any]]:
+    """Sends the online feature request to a remote feature server (through endpoint) and returns the feature dict.
+
+    The output should be identical to:
+
+    fs.get_online_features(features=features, entity_rows=entity_rows, full_feature_names=full_feature_names).to_dict()
+
+    This makes it easy to test the remote feature server by comparing the output to the local method.
+
+    """
+    request = {
+        # Convert list of dicts (entity_rows) into dict of lists (entities) for json request
+        "entities": {key: [row[key] for row in entity_rows] for key in entity_rows[0]},
+        "full_feature_names": full_feature_names,
+    }
+    # Set either features or feature_service depending on the type of the parameter
+    if isinstance(features, list):
+        request["features"] = features
+    else:
+        request["feature_service"] = features.name
+    for _ in range(25):
+        # Send the request to the remote feature server and get the response in JSON format
+        response = requests.post(
+            f"{endpoint}/get-online-features", json=request, timeout=30
+        ).json()
+        # Retry if the response is an internal server error, which can happen when the lambda is being restarted
+        if response.get("message") != "Internal Server Error":
+            break
+        # Sleep between retries to give the server some time to start
+        time.sleep(15)
+    else:
+        raise Exception("Failed to get online features from remote feature server")
+    if "metadata" not in response:
+        raise Exception(
+            f"Failed to get online features from remote feature server {response}"
+        )
+    keys = response["metadata"]["feature_names"]
+    # Strip the response structure down to a list of value lists
+    values = [row["values"] for row in response["results"]]
+    # Zip feature names with their value lists to produce the dict-of-lists return format
+    return {key: feature_vector for key, feature_vector in zip(keys, values)}
+
+
+def get_online_features_dict(
+    environment: Environment,
+    features: Union[List[str], FeatureService],
+    entity_rows: List[Dict[str, Any]],
+    full_feature_names: bool = False,
+) -> Dict[str, List[Any]]:
+    """Get the online feature values from both SDK and remote feature servers, assert equality and return values.
+
+    Always use this method instead of fs.get_online_features(...) in this test file.
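+
+    (When a feature server endpoint is configured, the same request is also sent
+    to that server and the two result dicts are asserted to be equal.)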
+
+    """
+    online_features = environment.feature_store.get_online_features(
+        features=features,
+        entity_rows=entity_rows,
+        full_feature_names=full_feature_names,
+    )
+    assertpy.assert_that(online_features).is_not_none()
+    dict1 = online_features.to_dict()
+
+    endpoint = environment.get_feature_server_endpoint()
+    # If endpoint is None, neither a local nor a remote feature server is configured
+    if endpoint is not None:
+        dict2 = _get_online_features_dict_remotely(
+            endpoint=endpoint,
+            features=features,
+            entity_rows=entity_rows,
+            full_feature_names=full_feature_names,
+        )
+
+        # Make sure that the two dicts are equal
+        assertpy.assert_that(dict1).is_equal_to(dict2)
+    elif environment.python_feature_server:
+        raise ValueError(
+            "feature_store.get_feature_server_endpoint() is None while python feature server is enabled"
+        )
+    return dict1
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v))
+def test_online_retrieval_with_event_timestamps(
+    environment, universal_data_sources, full_feature_names
+):
+    fs = environment.feature_store
+    entities, datasets, data_sources = universal_data_sources
+    feature_views = construct_universal_feature_views(data_sources)
+
+    fs.apply([driver(), feature_views.driver, feature_views.global_fv])
+
+    # fake data to ingest into Online Store
+    data = {
+        "driver_id": [1, 2],
+        "conv_rate": [0.5, 0.3],
+        "acc_rate": [0.6, 0.4],
+        "avg_daily_trips": [4, 5],
+        "event_timestamp": [
+            pd.to_datetime(1646263500, utc=True, unit="s"),
+            pd.to_datetime(1646263600, utc=True, unit="s"),
+        ],
+        "created": [
+            pd.to_datetime(1646263500, unit="s"),
+            pd.to_datetime(1646263600, unit="s"),
+        ],
+    }
+    df_ingest = pd.DataFrame(data)
+
+    # directly ingest data into the Online Store
+    fs.write_to_online_store("driver_stats", df_ingest)
+
+    response = fs.get_online_features(
+        features=[
+            "driver_stats:avg_daily_trips",
+            "driver_stats:acc_rate",
+            "driver_stats:conv_rate",
+        ],
+        entity_rows=[{"driver_id": 1}, {"driver_id": 2}],
+    )
+    df = response.to_df(True)
+    assertpy.assert_that(len(df)).is_equal_to(2)
+    assertpy.assert_that(df["driver_id"].iloc[0]).is_equal_to(1)
+    assertpy.assert_that(df["driver_id"].iloc[1]).is_equal_to(2)
+    assertpy.assert_that(df["avg_daily_trips" + TIMESTAMP_POSTFIX].iloc[0]).is_equal_to(
+        1646263500
+    )
+    assertpy.assert_that(df["avg_daily_trips" + TIMESTAMP_POSTFIX].iloc[1]).is_equal_to(
+        1646263600
+    )
+    assertpy.assert_that(df["acc_rate" + TIMESTAMP_POSTFIX].iloc[0]).is_equal_to(
+        1646263500
+    )
+    assertpy.assert_that(df["acc_rate" + TIMESTAMP_POSTFIX].iloc[1]).is_equal_to(
+        1646263600
+    )
+    assertpy.assert_that(df["conv_rate" + TIMESTAMP_POSTFIX].iloc[0]).is_equal_to(
+        1646263500
+    )
+    assertpy.assert_that(df["conv_rate" + TIMESTAMP_POSTFIX].iloc[1]).is_equal_to(
+        1646263600
+    )
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+@pytest.mark.goserver
+@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v))
+def test_online_retrieval(environment, universal_data_sources, full_feature_names):
+    fs = environment.feature_store
+    entities, datasets, data_sources = universal_data_sources
+    feature_views = construct_universal_feature_views(data_sources)
+
+    feature_service = FeatureService(
+        "convrate_plus100",
+        features=[
+            feature_views.driver[["conv_rate"]],
+            feature_views.driver_odfv,
+            feature_views.customer[["current_balance"]],
+        ],
+    )
+    feature_service_entity_mapping = FeatureService(
+        name="entity_mapping",
+        
features=[ + feature_views.location.with_name("origin").with_join_key_map( + {"location_id": "origin_id"} + ), + feature_views.location.with_name("destination").with_join_key_map( + {"location_id": "destination_id"} + ), + ], + ) + + feast_objects = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend( + [ + driver(), + customer(), + location(), + feature_service, + feature_service_entity_mapping, + ] + ) + fs.apply(feast_objects) + fs.materialize( + environment.start_date - timedelta(days=1), + environment.end_date + timedelta(days=1), + ) + + entity_sample = datasets.orders_df.sample(10)[ + ["customer_id", "driver_id", "order_id", "event_timestamp"] + ] + orders_df = datasets.orders_df[ + ( + datasets.orders_df["customer_id"].isin(entity_sample["customer_id"]) + & datasets.orders_df["driver_id"].isin(entity_sample["driver_id"]) + ) + ] + + sample_drivers = entity_sample["driver_id"] + drivers_df = datasets.driver_df[ + datasets.driver_df["driver_id"].isin(sample_drivers) + ] + + sample_customers = entity_sample["customer_id"] + customers_df = datasets.customer_df[ + datasets.customer_df["customer_id"].isin(sample_customers) + ] + + location_pairs = np.array(list(itertools.permutations(entities.location_vals, 2))) + sample_location_pairs = location_pairs[ + np.random.choice(len(location_pairs), 10) + ].T.tolist() + origins_df = datasets.location_df[ + datasets.location_df["location_id"].isin(sample_location_pairs[0]) + ] + destinations_df = datasets.location_df[ + datasets.location_df["location_id"].isin(sample_location_pairs[1]) + ] + + global_df = datasets.global_df + + entity_rows = [ + {"driver_id": d, "customer_id": c, "val_to_add": 50} + for (d, c) in zip(sample_drivers, sample_customers) + ] + + feature_refs = [ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "conv_rate_plus_100:conv_rate_plus_100", + "conv_rate_plus_100:conv_rate_plus_val_to_add", + "order:order_is_success", + "global_stats:num_rides", + "global_stats:avg_ride_length", + ] + unprefixed_feature_refs = [f.rsplit(":", 1)[-1] for f in feature_refs if ":" in f] + # Remove the on demand feature view output features, since they're not present in the source dataframe + unprefixed_feature_refs.remove("conv_rate_plus_100") + unprefixed_feature_refs.remove("conv_rate_plus_val_to_add") + + online_features_dict = get_online_features_dict( + environment=environment, + features=feature_refs, + entity_rows=entity_rows, + full_feature_names=full_feature_names, + ) + + # Test that the on demand feature views compute properly even if the dependent conv_rate + # feature isn't requested. 
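+    # (Feast is expected to fetch the ODFV's input features internally, so leaving
+    # "driver_stats:conv_rate" out of the request should not break the transform.)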
+    online_features_no_conv_rate = get_online_features_dict(
+        environment=environment,
+        features=[ref for ref in feature_refs if ref != "driver_stats:conv_rate"],
+        entity_rows=entity_rows,
+        full_feature_names=full_feature_names,
+    )
+
+    assert online_features_no_conv_rate is not None
+
+    keys = set(online_features_dict.keys())
+    expected_keys = set(
+        f.replace(":", "__") if full_feature_names else f.split(":")[-1]
+        for f in feature_refs
+    ) | {"customer_id", "driver_id"}
+    assert (
+        keys == expected_keys
+    ), f"Response keys are different from expected: {keys - expected_keys} (extra) and {expected_keys - keys} (missing)"
+
+    tc = unittest.TestCase()
+    for i, entity_row in enumerate(entity_rows):
+        df_features = get_latest_feature_values_from_dataframes(
+            driver_df=drivers_df,
+            customer_df=customers_df,
+            orders_df=orders_df,
+            global_df=global_df,
+            entity_row=entity_row,
+        )
+
+        assert df_features["customer_id"] == online_features_dict["customer_id"][i]
+        assert df_features["driver_id"] == online_features_dict["driver_id"][i]
+        tc.assertAlmostEqual(
+            online_features_dict[
+                response_feature_name(
+                    "conv_rate_plus_100", feature_refs, full_feature_names
+                )
+            ][i],
+            df_features["conv_rate"] + 100,
+            delta=0.0001,
+        )
+        tc.assertAlmostEqual(
+            online_features_dict[
+                response_feature_name(
+                    "conv_rate_plus_val_to_add", feature_refs, full_feature_names
+                )
+            ][i],
+            df_features["conv_rate"] + df_features["val_to_add"],
+            delta=0.0001,
+        )
+        for unprefixed_feature_ref in unprefixed_feature_refs:
+            tc.assertAlmostEqual(
+                df_features[unprefixed_feature_ref],
+                online_features_dict[
+                    response_feature_name(
+                        unprefixed_feature_ref, feature_refs, full_feature_names
+                    )
+                ][i],
+                delta=0.0001,
+            )
+
+    # Check what happens for missing values
+    missing_responses_dict = get_online_features_dict(
+        environment=environment,
+        features=feature_refs,
+        entity_rows=[{"driver_id": 0, "customer_id": 0, "val_to_add": 100}],
+        full_feature_names=full_feature_names,
+    )
+    assert missing_responses_dict is not None
+    for unprefixed_feature_ref in unprefixed_feature_refs:
+        if unprefixed_feature_ref not in {"num_rides", "avg_ride_length"}:
+            tc.assertIsNone(
+                missing_responses_dict[
+                    response_feature_name(
+                        unprefixed_feature_ref, feature_refs, full_feature_names
+                    )
+                ][0]
+            )
+
+    # Check what happens for missing request data
+    with pytest.raises(RequestDataNotFoundInEntityRowsException):
+        get_online_features_dict(
+            environment=environment,
+            features=feature_refs,
+            entity_rows=[{"driver_id": 0, "customer_id": 0}],
+            full_feature_names=full_feature_names,
+        )
+
+    assert_feature_service_correctness(
+        environment,
+        feature_service,
+        entity_rows,
+        full_feature_names,
+        drivers_df,
+        customers_df,
+        orders_df,
+        global_df,
+    )
+
+    entity_rows = [
+        {"origin_id": origin, "destination_id": destination}
+        for (_driver, _customer, origin, destination) in zip(
+            sample_drivers, sample_customers, *sample_location_pairs
+        )
+    ]
+    assert_feature_service_entity_mapping_correctness(
+        environment,
+        feature_service_entity_mapping,
+        entity_rows,
+        full_feature_names,
+        origins_df,
+        destinations_df,
+    )
+
+
+@pytest.mark.integration
+@pytest.mark.universal
+def test_online_store_cleanup(environment, universal_data_sources):
+    """
+    Some online store implementations (like Redis) keep features from different feature views
+    but with common entities together.
+    This might end up deleting all features attached to the entity
+    when only one feature view was the deletion target (see https://github.com/feast-dev/feast/issues/2150).
+
+    Plan:
+    1. Register two feature views with common entity "driver"
+    2. Materialize data
+    3. Check if features are available (via online retrieval)
+    4. Delete one feature view
+    5. Check that features for the other feature view are still available
+    6. Delete the other feature view (and create it again)
+    7. Verify that features for both feature views were deleted
+    """
+    fs = environment.feature_store
+    entities, datasets, data_sources = universal_data_sources
+    driver_stats_fv = construct_universal_feature_views(data_sources).driver
+
+    driver_entities = entities.driver_vals
+    df = pd.DataFrame(
+        {
+            "ts_1": [environment.end_date] * len(driver_entities),
+            "created_ts": [environment.end_date] * len(driver_entities),
+            "driver_id": driver_entities,
+            "value": np.random.random(size=len(driver_entities)),
+        }
+    )
+
+    ds = environment.data_source_creator.create_data_source(
+        df, destination_name="simple_driver_dataset"
+    )
+
+    simple_driver_fv = driver_feature_view(
+        data_source=ds, name="test_universal_online_simple_driver"
+    )
+
+    fs.apply([driver(), simple_driver_fv, driver_stats_fv])
+
+    fs.materialize(
+        environment.start_date - timedelta(days=1),
+        environment.end_date + timedelta(days=1),
+    )
+    expected_values = df.sort_values(by="driver_id")
+
+    features = [f"{simple_driver_fv.name}:value"]
+    entity_rows = [{"driver_id": driver_id} for driver_id in sorted(driver_entities)]
+
+    online_features = fs.get_online_features(
+        features=features, entity_rows=entity_rows
+    ).to_dict()
+    assert np.allclose(expected_values["value"], online_features["value"])
+
+    fs.apply(
+        objects=[simple_driver_fv], objects_to_delete=[driver_stats_fv], partial=False
+    )
+
+    online_features = fs.get_online_features(
+        features=features, entity_rows=entity_rows
+    ).to_dict()
+    assert np.allclose(expected_values["value"], online_features["value"])
+
+    fs.apply(objects=[], objects_to_delete=[simple_driver_fv], partial=False)
+
+    def eventually_apply() -> Tuple[None, bool]:
+        try:
+            fs.apply([simple_driver_fv])
+        except BotoCoreError:
+            return None, False
+
+        return None, True
+
+    # The online store backend might be eventually consistent on schema updates,
+    # so recreating a table that was just deleted might need some retries
+    wait_retry_backoff(eventually_apply, timeout_secs=60)
+
+    online_features = fs.get_online_features(
+        features=features, entity_rows=entity_rows
+    ).to_dict()
+    assert all(v is None for v in online_features["value"])
+
+
+def response_feature_name(
+    feature: str, feature_refs: List[str], full_feature_names: bool
+) -> str:
+    if not full_feature_names:
+        return feature
+
+    for feature_ref in feature_refs:
+        if feature_ref.endswith(feature):
+            return feature_ref.replace(":", "__")
+
+    return feature
+
+
+def get_latest_row(entity_row, df, join_key, entity_key):
+    rows = df[df[join_key] == entity_row[entity_key]]
+    return rows.loc[rows["event_timestamp"].idxmax()].to_dict()
+
+
+def get_latest_feature_values_from_dataframes(
+    driver_df,
+    customer_df,
+    orders_df,
+    entity_row,
+    global_df=None,
+    origin_df=None,
+    destination_df=None,
+):
+    latest_driver_row = get_latest_row(entity_row, driver_df, "driver_id", "driver_id")
+    latest_customer_row = get_latest_row(
+        entity_row, customer_df, "customer_id", "customer_id"
+    )
+
+    # Since the event timestamp columns may contain timestamps of different timezones,
+    # we must first convert the 
timestamps to UTC before we can compare them. + order_rows = orders_df[ + (orders_df["driver_id"] == entity_row["driver_id"]) + & (orders_df["customer_id"] == entity_row["customer_id"]) + ] + timestamps = order_rows[["event_timestamp"]] + timestamps["event_timestamp"] = pd.to_datetime( + timestamps["event_timestamp"], utc=True + ) + max_index = timestamps["event_timestamp"].idxmax() + latest_orders_row = order_rows.loc[max_index] + + if global_df is not None: + latest_global_row = global_df.loc[ + global_df["event_timestamp"].idxmax() + ].to_dict() + if origin_df is not None: + latest_location_row = get_latest_feature_values_for_location_df( + entity_row, origin_df, destination_df + ) + + request_data_features = entity_row.copy() + request_data_features.pop("driver_id") + request_data_features.pop("customer_id") + if global_df is not None: + return { + **latest_customer_row, + **latest_driver_row, + **latest_orders_row, + **latest_global_row, + **request_data_features, + } + if origin_df is not None: + request_data_features.pop("origin_id") + request_data_features.pop("destination_id") + return { + **latest_customer_row, + **latest_driver_row, + **latest_orders_row, + **latest_location_row, + **request_data_features, + } + return { + **latest_customer_row, + **latest_driver_row, + **latest_orders_row, + **request_data_features, + } + + +def get_latest_feature_values_for_location_df(entity_row, origin_df, destination_df): + latest_origin_row = get_latest_row( + entity_row, origin_df, "location_id", "origin_id" + ) + latest_destination_row = get_latest_row( + entity_row, destination_df, "location_id", "destination_id" + ) + # Need full feature names for shadow entities + latest_origin_row["origin__temperature"] = latest_origin_row.pop("temperature") + latest_destination_row["destination__temperature"] = latest_destination_row.pop( + "temperature" + ) + + return { + **latest_origin_row, + **latest_destination_row, + } + + +def assert_feature_service_correctness( + environment, + feature_service, + entity_rows, + full_feature_names, + drivers_df, + customers_df, + orders_df, + global_df, +): + feature_service_online_features_dict = get_online_features_dict( + environment=environment, + features=feature_service, + entity_rows=entity_rows, + full_feature_names=full_feature_names, + ) + feature_service_keys = feature_service_online_features_dict.keys() + expected_feature_refs = [ + f"{projection.name_to_use()}__{feature.name}" + if full_feature_names + else feature.name + for projection in feature_service.feature_view_projections + for feature in projection.features + ] + assert set(feature_service_keys) == set(expected_feature_refs) | { + "customer_id", + "driver_id", + } + + tc = unittest.TestCase() + for i, entity_row in enumerate(entity_rows): + df_features = get_latest_feature_values_from_dataframes( + driver_df=drivers_df, + customer_df=customers_df, + orders_df=orders_df, + global_df=global_df, + entity_row=entity_row, + ) + tc.assertAlmostEqual( + feature_service_online_features_dict[ + response_feature_name( + "conv_rate_plus_100", expected_feature_refs, full_feature_names + ) + ][i], + df_features["conv_rate"] + 100, + delta=0.0001, + ) + + +def assert_feature_service_entity_mapping_correctness( + environment, + feature_service, + entity_rows, + full_feature_names, + origins_df, + destinations_df, +): + if full_feature_names: + feature_service_online_features_dict = get_online_features_dict( + environment=environment, + features=feature_service, + entity_rows=entity_rows, + 
full_feature_names=full_feature_names, + ) + feature_service_keys = feature_service_online_features_dict.keys() + + expected_features = [ + f"{projection.name_to_use()}__{feature.name}" + if full_feature_names + else feature.name + for projection in feature_service.feature_view_projections + for feature in projection.features + ] + assert set(feature_service_keys) == set(expected_features) | { + "destination_id", + "origin_id", + } + + for i, entity_row in enumerate(entity_rows): + df_features = get_latest_feature_values_for_location_df( + origin_df=origins_df, + destination_df=destinations_df, + entity_row=entity_row, + ) + for feature_name in ["origin__temperature", "destination__temperature"]: + assert ( + feature_service_online_features_dict[feature_name][i] + == df_features[feature_name] + ) + else: + # using 2 of the same FeatureView without full_feature_names=True will result in collision + with pytest.raises(FeatureNameCollisionError): + get_online_features_dict( + environment=environment, + features=feature_service, + entity_rows=entity_rows, + full_feature_names=full_feature_names, + ) diff --git a/sdk/python/tests/integration/registration/test_cli.py b/sdk/python/tests/integration/registration/test_cli.py new file mode 100644 index 0000000000..655e53e759 --- /dev/null +++ b/sdk/python/tests/integration/registration/test_cli.py @@ -0,0 +1,333 @@ +import os +import tempfile +import uuid +from contextlib import contextmanager +from pathlib import Path +from textwrap import dedent +from typing import List + +import pytest +import yaml +from assertpy import assertpy + +from feast import FeatureStore, RepoConfig +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.repo_configuration import Environment +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.bigquery import ( + BigQueryDataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.file import ( + FileDataSourceCreator, +) +from tests.integration.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, +) +from tests.utils.cli_utils import CliRunner, get_example_repo +from tests.utils.online_read_write_test import basic_rw_test + + +@pytest.mark.integration +@pytest.mark.universal +def test_universal_cli(environment: Environment): + project = f"test_universal_cli_{str(uuid.uuid4()).replace('-', '')[:8]}" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, environment.test_repo_config, repo_path + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("example_feature_repo_1.py")) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # Store registry contents, to be compared later. + fs = FeatureStore(repo_path=str(repo_path)) + registry_dict = fs.registry.to_dict(project=project) + # Save only the specs, not the metadata. 
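+            # (Each registry entry is assumed to be a dict with a "spec" and a
+            # "meta" key, where "meta" holds apply timestamps that legitimately
+            # change between runs, so it is excluded from the comparison.)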
+ registry_specs = { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + + # entity & feature view list commands should succeed + result = runner.run(["entities", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-services", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["data-sources", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # entity & feature view describe commands should succeed when objects exist + result = runner.run(["entities", "describe", "driver"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-views", "describe", "driver_locations"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-services", "describe", "driver_locations_service"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_feature_views()).is_length(4) + result = runner.run( + ["data-sources", "describe", "customer_profile_source"], cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_data_sources()).is_length(4) + + # entity & feature view describe commands should fail when objects don't exist + result = runner.run(["entities", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-services", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["data-sources", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + + # Doing another apply should be a no op, and should not cause errors + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + basic_rw_test( + FeatureStore(repo_path=str(repo_path), config=None), + view_name="driver_locations", + ) + + # Confirm that registry contents have not changed. 
+ registry_dict = fs.registry.to_dict(project=project) + assertpy.assert_that(registry_specs).is_equal_to( + { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + ) + + result = runner.run(["teardown"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) + + +def make_feature_store_yaml(project, test_repo_config, repo_dir_name: Path): + offline_creator: DataSourceCreator = test_repo_config.offline_store_creator(project) + + offline_store_config = offline_creator.create_offline_store_config() + online_store = test_repo_config.online_store + + config = RepoConfig( + registry=str(Path(repo_dir_name) / "registry.db"), + project=project, + provider=test_repo_config.provider, + offline_store=offline_store_config, + online_store=online_store, + repo_path=str(Path(repo_dir_name)), + ) + config_dict = config.dict() + if ( + isinstance(config_dict["online_store"], dict) + and "redis_type" in config_dict["online_store"] + ): + if str(config_dict["online_store"]["redis_type"]) == "RedisType.redis_cluster": + config_dict["online_store"]["redis_type"] = "redis_cluster" + elif str(config_dict["online_store"]["redis_type"]) == "RedisType.redis": + config_dict["online_store"]["redis_type"] = "redis" + config_dict["repo_path"] = str(config_dict["repo_path"]) + return yaml.safe_dump(config_dict) + + +NULLABLE_ONLINE_STORE_CONFIGS: List[IntegrationTestRepoConfig] = [ + IntegrationTestRepoConfig( + provider="local", + offline_store_creator=FileDataSourceCreator, + online_store=None, + ), +] + +if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": + NULLABLE_ONLINE_STORE_CONFIGS.extend( + [ + IntegrationTestRepoConfig( + provider="gcp", + offline_store_creator=BigQueryDataSourceCreator, + online_store=None, + ), + IntegrationTestRepoConfig( + provider="aws", + offline_store_creator=RedshiftDataSourceCreator, + online_store=None, + ), + ] + ) + + +@pytest.mark.integration +@pytest.mark.parametrize("test_nullable_online_store", NULLABLE_ONLINE_STORE_CONFIGS) +def test_nullable_online_store(test_nullable_online_store) -> None: + project = f"test_nullable_online_store{str(uuid.uuid4()).replace('-', '')[:8]}" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, test_nullable_online_store, repo_path + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("example_feature_repo_1.py")) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) + + +@contextmanager +def setup_third_party_provider_repo(provider_name: str): + with tempfile.TemporaryDirectory() as repo_dir_name: + + # Construct an example repo in a temporary dir + repo_path = Path(repo_dir_name) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text( + dedent( + f""" + project: foo + registry: data/registry.db + provider: {provider_name} + online_store: + path: data/online_store.db + type: sqlite + offline_store: + type: file + """ + ) + ) + + (repo_path / "foo").mkdir() + repo_example = repo_path / "foo/provider.py" + repo_example.write_text( + (Path(__file__).parents[2] / "foo_provider.py").read_text() + ) + + yield repo_path + + 
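+# The tests below exercise the dotted-path convention for pluggable classes.
+# As a hedged sketch (inferred from the error messages asserted on below, not
+# from Feast internals), a name without dots is treated as a built-in, while a
+# dotted name such as "foo.provider.FooProvider" is split on its last dot into
+# a module to import and a class to load, roughly:
+#
+#   module_name, class_name = "foo.provider.FooProvider".rsplit(".", 1)
+#   cls = getattr(importlib.import_module(module_name), class_name)
+#
+# A missing module or class yields the "Could not import module ..." and
+# "Could not import class ..." errors checked below.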
+@contextmanager
+def setup_third_party_registry_store_repo(registry_store: str):
+    with tempfile.TemporaryDirectory() as repo_dir_name:
+
+        # Construct an example repo in a temporary dir
+        repo_path = Path(repo_dir_name)
+
+        repo_config = repo_path / "feature_store.yaml"
+
+        repo_config.write_text(
+            dedent(
+                f"""
+            project: foo
+            registry:
+                registry_store_type: {registry_store}
+                path: foobar://foo.bar
+            provider: local
+            online_store:
+                path: data/online_store.db
+                type: sqlite
+            offline_store:
+                type: file
+            """
+            )
+        )
+
+        (repo_path / "foo").mkdir()
+        repo_example = repo_path / "foo/registry_store.py"
+        repo_example.write_text(
+            (Path(__file__).parents[2] / "foo_registry_store.py").read_text()
+        )
+
+        yield repo_path
+
+
+def test_3rd_party_providers() -> None:
+    """
+    Test running apply on third party providers
+    """
+    runner = CliRunner()
+    # Check with an incorrect built-in provider name (no dots)
+    with setup_third_party_provider_repo("feast123") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(b"Provider 'feast123' is not implemented")
+    # Check with a third-party provider whose module cannot be imported
+    with setup_third_party_provider_repo("feast_foo.Provider") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(
+            b"Could not import module 'feast_foo' while attempting to load class 'Provider'"
+        )
+    # Check with a third-party provider whose class does not exist in the module
+    with setup_third_party_provider_repo("foo.FooProvider") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(
+            b"Could not import class 'FooProvider' from module 'foo'"
+        )
+    # Check with a correct third-party provider name
+    with setup_third_party_provider_repo("foo.provider.FooProvider") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(0)
+
+
+def test_3rd_party_registry_store() -> None:
+    """
+    Test running apply on third party registry stores
+    """
+    runner = CliRunner()
+    # Check with a registry store name that doesn't end with "RegistryStore" (no dots)
+    with setup_third_party_registry_store_repo("feast123") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(
+            b'Registry store class name should end with "RegistryStore"'
+        )
+    # Check with a third-party registry store whose module cannot be imported
+    with setup_third_party_registry_store_repo("feast_foo.RegistryStore") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(
+            b"Could not import module 'feast_foo' while attempting to load class 'RegistryStore'"
+        )
+    # Check with a third-party registry store whose class does not exist in the module
+    with setup_third_party_registry_store_repo("foo.FooRegistryStore") as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(1)
+        assertpy.assert_that(output).contains(
+            b"Could not import class 'FooRegistryStore' from module 'foo'"
+        )
+    # Check with a correct third-party registry store name
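+    # ("foo.registry_store.FooRegistryStore" should resolve to module
+    # foo.registry_store and class FooRegistryStore, so apply succeeds.)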
+    with setup_third_party_registry_store_repo(
+        "foo.registry_store.FooRegistryStore"
+    ) as repo_path:
+        return_code, output = runner.run_with_output(["apply"], cwd=repo_path)
+        assertpy.assert_that(return_code).is_equal_to(0)
diff --git a/sdk/python/tests/integration/registration/test_cli_apply_duplicates.py b/sdk/python/tests/integration/registration/test_cli_apply_duplicates.py
new file mode 100644
index 0000000000..bad3b50a80
--- /dev/null
+++ b/sdk/python/tests/integration/registration/test_cli_apply_duplicates.py
@@ -0,0 +1,180 @@
+import tempfile
+from pathlib import Path
+from textwrap import dedent
+
+from tests.utils.cli_utils import CliRunner, get_example_repo
+
+
+def test_cli_apply_duplicated_featureview_names() -> None:
+    run_simple_apply_test(
+        example_repo_file_name="example_feature_repo_with_duplicated_featureview_names.py",
+        expected_error=b"Please ensure that all feature view names are case-insensitively unique",
+    )
+
+
+def test_cli_apply_duplicate_data_source_names() -> None:
+    run_simple_apply_test(
+        example_repo_file_name="example_repo_duplicate_data_source_names.py",
+        expected_error=b"Please ensure that all data source names are case-insensitively unique",
+    )
+
+
+def run_simple_apply_test(example_repo_file_name: str, expected_error: bytes):
+    with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name:
+        runner = CliRunner()
+        # Construct an example repo in a temporary dir
+        repo_path = Path(repo_dir_name)
+        data_path = Path(data_dir_name)
+
+        repo_config = repo_path / "feature_store.yaml"
+
+        repo_config.write_text(
+            dedent(
+                f"""
+            project: foo
+            registry: {data_path / "registry.db"}
+            provider: local
+            online_store:
+                path: {data_path / "online_store.db"}
+            """
+            )
+        )
+
+        repo_example = repo_path / "example.py"
+        repo_example.write_text(get_example_repo(example_repo_file_name))
+        rc, output = runner.run_with_output(["apply"], cwd=repo_path)
+
+        assert rc != 0 and expected_error in output
+
+
+def test_cli_apply_imported_featureview() -> None:
+    """
+    Test that a feature view can be imported from one py file and used in a
+    feature service defined in another py file in a feature repo using CLI
+    """
+
+    with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name:
+        runner = CliRunner()
+        # Construct an example repo in a temporary dir
+        repo_path = Path(repo_dir_name)
+        data_path = Path(data_dir_name)
+
+        repo_config = repo_path / "feature_store.yaml"
+
+        repo_config.write_text(
+            dedent(
+                f"""
+            project: foo
+            registry: {data_path / "registry.db"}
+            provider: local
+            online_store:
+                path: {data_path / "online_store.db"}
+            """
+            )
+        )
+
+        repo_example = repo_path / "example.py"
+        repo_example.write_text(get_example_repo("example_feature_repo_2.py"))
+        repo_example_2 = repo_path / "example_2.py"
+        repo_example_2.write_text(
+            "from example import driver_hourly_stats_view\n"
+            "from feast import FeatureService\n"
+            "a_feature_service = FeatureService(\n"
+            "    name='driver_locations_service',\n"
+            "    features=[driver_hourly_stats_view],\n"
+            ")\n"
+        )
+
+        rc, output = runner.run_with_output(["apply"], cwd=repo_path)
+
+        assert rc == 0
+        assert b"Created feature service driver_locations_service" in output
+
+
+def test_cli_apply_imported_featureview_with_duplication() -> None:
+    """
+    Test that importing a feature view and then redefining a feature view with
+    the same name in another py file fails in a feature repo using CLI
+    """
+
+    with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name:
+        runner = CliRunner()
+        # Construct an example
repo in a temporary dir + repo_path = Path(repo_dir_name) + data_path = Path(data_dir_name) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text( + dedent( + f""" + project: foo + registry: {data_path / "registry.db"} + provider: local + online_store: + path: {data_path / "online_store.db"} + """ + ) + ) + + repo_example = repo_path / "example.py" + repo_example.write_text(get_example_repo("example_feature_repo_2.py")) + repo_example_2 = repo_path / "example_2.py" + repo_example_2.write_text( + "from datetime import timedelta\n" + "from example import driver_hourly_stats, driver_hourly_stats_view\n" + "from feast import FeatureService, FeatureView\n" + "a_feature_service = FeatureService(\n" + " name='driver_locations_service',\n" + " features=[driver_hourly_stats_view],\n" + ")\n" + "driver_hourly_stats_view_2 = FeatureView(\n" + " name='driver_hourly_stats',\n" + " entities=['driver_id'],\n" + " ttl=timedelta(days=1),\n" + " online=True,\n" + " batch_source=driver_hourly_stats,\n" + " tags={'dummy': 'true'})\n" + ) + + rc, output = runner.run_with_output(["apply"], cwd=repo_path) + + assert rc != 0 + assert ( + b"More than one feature view with name driver_hourly_stats found." in output + ) + + +def test_cli_apply_duplicated_featureview_names_multiple_py_files() -> None: + """ + Test apply feature views with duplicated names from multiple py files in a feature repo using CLI + """ + + with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: + runner = CliRunner() + # Construct an example repo in a temporary dir + repo_path = Path(repo_dir_name) + data_path = Path(data_dir_name) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text( + dedent( + f""" + project: foo + registry: {data_path / "registry.db"} + provider: local + online_store: + path: {data_path / "online_store.db"} + """ + ) + ) + # Create multiple py files containing the same feature view name + for i in range(3): + repo_example = repo_path / f"example{i}.py" + repo_example.write_text(get_example_repo("example_feature_repo_2.py")) + rc, output = runner.run_with_output(["apply"], cwd=repo_path) + + assert ( + rc != 0 + and b"Please ensure that all feature view names are case-insensitively unique" + in output + ) diff --git a/sdk/python/tests/integration/registration/test_cli_aws.py b/sdk/python/tests/integration/registration/test_cli_aws.py deleted file mode 100644 index f2345420ab..0000000000 --- a/sdk/python/tests/integration/registration/test_cli_aws.py +++ /dev/null @@ -1,56 +0,0 @@ -import random -import string -import tempfile -from pathlib import Path -from textwrap import dedent - -import pytest - -from feast.feature_store import FeatureStore -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test import basic_rw_test - - -@pytest.mark.integration -def test_basic() -> None: - project_id = "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(10) - ) - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: {project_id} - registry: {data_path / "registry.db"} - provider: aws - online_store: - type: dynamodb - region: us-west-2 - """ - ) - ) - - repo_example = repo_path / "example.py" - 
repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - # Doing another apply should be a no op, and should not cause errors - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_locations", - ) - - result = runner.run(["teardown"], cwd=repo_path) - assert result.returncode == 0 diff --git a/sdk/python/tests/integration/registration/test_cli_gcp.py b/sdk/python/tests/integration/registration/test_cli_gcp.py deleted file mode 100644 index b4be581088..0000000000 --- a/sdk/python/tests/integration/registration/test_cli_gcp.py +++ /dev/null @@ -1,86 +0,0 @@ -import random -import string -import tempfile -from pathlib import Path -from textwrap import dedent - -import pytest - -from feast.feature_store import FeatureStore -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test import basic_rw_test - - -@pytest.mark.integration -def test_basic() -> None: - project_id = "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(10) - ) - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: {project_id} - registry: {data_path / "registry.db"} - provider: gcp - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - # Doing another apply should be a no op, and should not cause errors - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_locations", - ) - - result = runner.run(["teardown"], cwd=repo_path) - assert result.returncode == 0 - - -@pytest.mark.integration -def test_missing_bq_source_fail() -> None: - project_id = "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(10) - ) - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: {project_id} - registry: {data_path / "registry.db"} - provider: gcp - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text( - get_example_repo("example_feature_repo_with_missing_bq_source.py") - ) - - returncode, output = runner.run_with_output(["apply"], cwd=repo_path) - assert returncode == 1 - assert b"DataSourceNotFoundException" in output diff --git a/sdk/python/tests/integration/registration/test_cli_local.py b/sdk/python/tests/integration/registration/test_cli_local.py deleted file mode 100644 index 5ec7ea2871..0000000000 --- a/sdk/python/tests/integration/registration/test_cli_local.py +++ /dev/null @@ -1,181 +0,0 @@ -import tempfile -from contextlib import contextmanager -from pathlib import Path -from textwrap import dedent - -import assertpy -import pytest - -from feast.feature_store import FeatureStore -from tests.utils.cli_utils import CliRunner, 
get_example_repo -from tests.utils.online_read_write_test import basic_rw_test - - -@pytest.mark.integration -def test_workflow() -> None: - """ - Test running apply on a sample repo, and make sure the infra gets created. - """ - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - # Construct an example repo in a temporary dir - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: foo - registry: {data_path / "registry.db"} - provider: local - online_store: - path: {data_path / "online_store.db"} - offline_store: - type: bigquery - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - # entity & feature view list commands should succeed - result = runner.run(["entities", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run(["feature-views", "list"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - # entity & feature view describe commands should succeed when objects exist - result = runner.run(["entities", "describe", "driver"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - result = runner.run( - ["feature-views", "describe", "driver_locations"], cwd=repo_path - ) - assertpy.assert_that(result.returncode).is_equal_to(0) - - # entity & feature view describe commands should fail when objects don't exist - result = runner.run(["entities", "describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(1) - - # Doing another apply should be a no op, and should not cause errors - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_locations", - ) - - result = runner.run(["teardown"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - -@pytest.mark.integration -def test_non_local_feature_repo() -> None: - """ - Test running apply on a sample repo, and make sure the infra gets created. 
- """ - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name: - - # Construct an example repo in a temporary dir - repo_path = Path(repo_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - """ - project: foo - registry: data/registry.db - provider: local - online_store: - path: data/online_store.db - offline_store: - type: bigquery - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - - result = runner.run(["apply"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - fs = FeatureStore(repo_path=str(repo_path)) - assertpy.assert_that(fs.list_feature_views()).is_length(3) - - result = runner.run(["teardown"], cwd=repo_path) - assertpy.assert_that(result.returncode).is_equal_to(0) - - -@contextmanager -def setup_third_party_provider_repo(provider_name: str): - with tempfile.TemporaryDirectory() as repo_dir_name: - - # Construct an example repo in a temporary dir - repo_path = Path(repo_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: foo - registry: data/registry.db - provider: {provider_name} - online_store: - path: data/online_store.db - type: sqlite - offline_store: - type: file - """ - ) - ) - - (repo_path / "foo").mkdir() - repo_example = repo_path / "foo/provider.py" - repo_example.write_text( - (Path(__file__).parents[2] / "foo_provider.py").read_text() - ) - - yield repo_path - - -def test_3rd_party_providers() -> None: - """ - Test running apply on third party providers - """ - runner = CliRunner() - # Check with incorrect built-in provider name (no dots) - with setup_third_party_provider_repo("feast123") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains(b"Provider 'feast123' is not implemented") - # Check with incorrect third-party provider name (with dots) - with setup_third_party_provider_repo("feast_foo.Provider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import Provider module 'feast_foo'" - ) - # Check with incorrect third-party provider name (with dots) - with setup_third_party_provider_repo("foo.FooProvider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(1) - assertpy.assert_that(output).contains( - b"Could not import Provider 'FooProvider' from module 'foo'" - ) - # Check with correct third-party provider name - with setup_third_party_provider_repo("foo.provider.FooProvider") as repo_path: - return_code, output = runner.run_with_output(["apply"], cwd=repo_path) - assertpy.assert_that(return_code).is_equal_to(0) diff --git a/sdk/python/tests/integration/registration/test_cli_redis.py b/sdk/python/tests/integration/registration/test_cli_redis.py deleted file mode 100644 index a4b146a29c..0000000000 --- a/sdk/python/tests/integration/registration/test_cli_redis.py +++ /dev/null @@ -1,101 +0,0 @@ -import random -import string -import tempfile -from pathlib import Path -from textwrap import dedent - -import pytest -import redis - -from feast.feature_store import FeatureStore -from tests.utils.cli_utils import CliRunner, get_example_repo -from tests.utils.online_read_write_test 
import basic_rw_test - - -@pytest.mark.integration -def test_basic() -> None: - project_id = "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(10) - ) - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: {project_id} - registry: {data_path / "registry.db"} - provider: local - offline_store: - type: bigquery - online_store: - type: redis - connection_string: localhost:6379,db=0 - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_1.py")) - - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - # Doing another apply should be a no op, and should not cause errors - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_locations", - ) - - result = runner.run(["teardown"], cwd=repo_path) - assert result.returncode == 0 - - -@pytest.mark.integration -def test_connection_error() -> None: - project_id = "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(10) - ) - runner = CliRunner() - with tempfile.TemporaryDirectory() as repo_dir_name, tempfile.TemporaryDirectory() as data_dir_name: - - repo_path = Path(repo_dir_name) - data_path = Path(data_dir_name) - - repo_config = repo_path / "feature_store.yaml" - - repo_config.write_text( - dedent( - f""" - project: {project_id} - registry: {data_path / "registry.db"} - provider: local - offline_store: - type: file - online_store: - type: redis - connection_string: localhost:6379,db=0= - """ - ) - ) - - repo_example = repo_path / "example.py" - repo_example.write_text(get_example_repo("example_feature_repo_2.py")) - - result = runner.run(["apply"], cwd=repo_path) - assert result.returncode == 0 - - # Redis does not support names for its databases. - with pytest.raises(redis.exceptions.ResponseError): - basic_rw_test( - FeatureStore(repo_path=str(repo_path), config=None), - view_name="driver_hourly_stats", - ) diff --git a/sdk/python/tests/integration/registration/test_feature_service_apply.py b/sdk/python/tests/integration/registration/test_feature_service_apply.py new file mode 100644 index 0000000000..7824f6333e --- /dev/null +++ b/sdk/python/tests/integration/registration/test_feature_service_apply.py @@ -0,0 +1,29 @@ +import pytest + +from feast import FeatureService +from tests.utils.cli_utils import CliRunner, get_example_repo + + +@pytest.mark.integration +def test_read_pre_applied() -> None: + """ + Read feature values from the FeatureStore using a FeatureService. 
+ """ + runner = CliRunner() + with runner.local_repo( + get_example_repo("example_feature_repo_1.py"), "bigquery" + ) as store: + + assert len(store.list_feature_services()) == 1 + fs = store.get_feature_service("driver_locations_service") + assert len(fs.tags) == 1 + assert fs.tags["release"] == "production" + + fv = store.get_feature_view("driver_locations") + + fs = FeatureService(name="new_feature_service", features=[fv[["lon"]]]) + + store.apply([fs]) + + assert len(store.list_feature_services()) == 2 + store.get_feature_service("new_feature_service") diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index b842adcc77..db4c6700ce 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ b/sdk/python/tests/integration/registration/test_feature_store.py @@ -21,19 +21,20 @@ from feast import FileSource from feast.data_format import ParquetFormat from feast.entity import Entity -from feast.feature import Feature from feast.feature_store import FeatureStore from feast.feature_view import FeatureView +from feast.field import Field from feast.infra.offline_stores.file import FileOfflineStoreConfig from feast.infra.online_stores.dynamodb import DynamoDBOnlineStoreConfig from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig from feast.protos.feast.types import Value_pb2 as ValueProto from feast.repo_config import RepoConfig +from feast.types import Array, Bytes, Float64, Int64, String from feast.value_type import ValueType from tests.utils.data_source_utils import ( prep_file_source, simple_bq_source_using_query_arg, - simple_bq_source_using_table_ref_arg, + simple_bq_source_using_table_arg, ) @@ -95,7 +96,7 @@ def test_apply_entity_success(test_feature_store): name="driver_car_id", description="Car driver id", value_type=ValueType.STRING, - labels={"team": "matchmaking"}, + tags={"team": "matchmaking"}, ) # Register Entity @@ -109,10 +110,12 @@ def test_apply_entity_success(test_feature_store): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) + test_feature_store.teardown() + @pytest.mark.integration @pytest.mark.parametrize( @@ -127,7 +130,7 @@ def test_apply_entity_integration(test_feature_store): name="driver_car_id", description="Car driver id", value_type=ValueType.STRING, - labels={"team": "matchmaking"}, + tags={"team": "matchmaking"}, ) # Register Entity @@ -141,8 +144,8 @@ def test_apply_entity_integration(test_feature_store): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) entity = test_feature_store.get_entity("driver_car_id") @@ -150,10 +153,12 @@ def test_apply_entity_integration(test_feature_store): entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) + test_feature_store.teardown() + @pytest.mark.parametrize( "test_feature_store", 
[lazy_fixture("feature_store_with_local_registry")], @@ -162,23 +167,23 @@ def test_apply_feature_view_success(test_feature_store): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), - file_url="file://feast/*", - event_timestamp_column="ts_col", + path="file://feast/*", + timestamp_field="ts_col", created_timestamp_column="timestamp", date_partition_column="date_partition_col", ) fv1 = FeatureView( name="my_feature_view_1", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) @@ -192,16 +197,18 @@ def test_apply_feature_view_success(test_feature_store): len(feature_views) == 1 and feature_views[0].name == "my_feature_view_1" and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == ValueType.INT64 + and feature_views[0].features[0].dtype == Int64 and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == ValueType.STRING + and feature_views[0].features[1].dtype == String and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == ValueType.STRING_LIST + and feature_views[0].features[2].dtype == Array(String) and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == ValueType.BYTES_LIST + and feature_views[0].features[3].dtype == Array(Bytes) and feature_views[0].entities[0] == "fs1_my_entity_1" ) + test_feature_store.teardown() + @pytest.mark.integration @pytest.mark.parametrize( @@ -209,15 +216,17 @@ def test_apply_feature_view_success(test_feature_store): ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_feature_view_inference_success(test_feature_store, dataframe_source): - with prep_file_source( - df=dataframe_source, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: + entity = Entity( + name="id", join_keys=["id_join_key"], value_type=ValueType.INT64 + ) + fv1 = FeatureView( name="fv1", entities=["id"], ttl=timedelta(minutes=5), online=True, - input=file_source, + batch_source=file_source, tags={}, ) @@ -226,7 +235,7 @@ def test_feature_view_inference_success(test_feature_store, dataframe_source): entities=["id"], ttl=timedelta(minutes=5), online=True, - input=simple_bq_source_using_table_ref_arg(dataframe_source, "ts_1"), + batch_source=simple_bq_source_using_table_arg(dataframe_source, "ts_1"), tags={}, ) @@ -235,11 +244,11 @@ def test_feature_view_inference_success(test_feature_store, dataframe_source): entities=["id"], ttl=timedelta(minutes=5), online=True, - input=simple_bq_source_using_query_arg(dataframe_source, "ts_1"), + batch_source=simple_bq_source_using_query_arg(dataframe_source, "ts_1"), tags={}, ) - test_feature_store.apply([fv1, fv2, fv3]) # Register Feature Views + test_feature_store.apply([entity, fv1, fv2, fv3]) # Register Feature Views feature_view_1 = 
test_feature_store.list_feature_views()[0] feature_view_2 = test_feature_store.list_feature_views()[1] feature_view_3 = test_feature_store.list_feature_views()[2] @@ -247,25 +256,27 @@ def test_feature_view_inference_success(test_feature_store, dataframe_source): actual_file_source = { (feature.name, feature.dtype) for feature in feature_view_1.features } - actual_bq_using_table_ref_arg_source = { + actual_bq_using_table_arg_source = { (feature.name, feature.dtype) for feature in feature_view_2.features } actual_bq_using_query_arg_source = { (feature.name, feature.dtype) for feature in feature_view_3.features } expected = { - ("float_col", ValueType.DOUBLE), - ("int64_col", ValueType.INT64), - ("string_col", ValueType.STRING), + ("float_col", Float64), + ("int64_col", Int64), + ("string_col", String), } assert ( expected == actual_file_source - == actual_bq_using_table_ref_arg_source + == actual_bq_using_table_arg_source == actual_bq_using_query_arg_source ) + test_feature_store.teardown() + @pytest.mark.integration @pytest.mark.parametrize( @@ -279,23 +290,23 @@ def test_apply_feature_view_integration(test_feature_store): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), - file_url="file://feast/*", - event_timestamp_column="ts_col", + path="file://feast/*", + timestamp_field="ts_col", created_timestamp_column="timestamp", date_partition_column="date_partition_col", ) fv1 = FeatureView( name="my_feature_view_1", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) @@ -309,13 +320,13 @@ def test_apply_feature_view_integration(test_feature_store): len(feature_views) == 1 and feature_views[0].name == "my_feature_view_1" and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == ValueType.INT64 + and feature_views[0].features[0].dtype == Int64 and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == ValueType.STRING + and feature_views[0].features[1].dtype == String and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == ValueType.STRING_LIST + and feature_views[0].features[2].dtype == Array(String) and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == ValueType.BYTES_LIST + and feature_views[0].features[3].dtype == Array(Bytes) and feature_views[0].entities[0] == "fs1_my_entity_1" ) @@ -323,13 +334,13 @@ def test_apply_feature_view_integration(test_feature_store): assert ( feature_view.name == "my_feature_view_1" and feature_view.features[0].name == "fs1_my_feature_1" - and feature_view.features[0].dtype == ValueType.INT64 + and feature_view.features[0].dtype == Int64 and feature_view.features[1].name == "fs1_my_feature_2" - and feature_view.features[1].dtype == ValueType.STRING + and feature_view.features[1].dtype == String and feature_view.features[2].name == "fs1_my_feature_3" - and feature_view.features[2].dtype == 
ValueType.STRING_LIST + and feature_view.features[2].dtype == Array(String) and feature_view.features[3].name == "fs1_my_feature_4" - and feature_view.features[3].dtype == ValueType.BYTES_LIST + and feature_view.features[3].dtype == Array(Bytes) and feature_view.entities[0] == "fs1_my_entity_1" ) @@ -337,6 +348,8 @@ def test_apply_feature_view_integration(test_feature_store): feature_views = test_feature_store.list_feature_views() assert len(feature_views) == 0 + test_feature_store.teardown() + @pytest.mark.parametrize( "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], @@ -346,8 +359,8 @@ def test_apply_object_and_read(test_feature_store): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), - file_url="file://feast/*", - event_timestamp_column="ts_col", + path="file://feast/*", + timestamp_field="ts_col", created_timestamp_column="timestamp", ) @@ -361,29 +374,29 @@ def test_apply_object_and_read(test_feature_store): fv1 = FeatureView( name="my_feature_view_1", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) fv2 = FeatureView( name="my_feature_view_2", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) @@ -398,6 +411,8 @@ def test_apply_object_and_read(test_feature_store): assert fv2 != fv1_actual assert e2 != e1_actual + test_feature_store.teardown() + def test_apply_remote_repo(): fd, registry_path = mkstemp() @@ -417,18 +432,16 @@ def test_apply_remote_repo(): ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_reapply_feature_view_success(test_feature_store, dataframe_source): - with prep_file_source( - df=dataframe_source, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: - e = Entity(name="id", value_type=ValueType.STRING) + e = Entity(name="id", join_keys=["id_join_key"], value_type=ValueType.STRING) # Create Feature View fv1 = FeatureView( name="my_feature_view_1", - features=[Feature(name="string_col", dtype=ValueType.STRING)], + schema=[Field(name="string_col", dtype=String)], entities=["id"], - input=file_source, + batch_source=file_source, ttl=timedelta(minutes=5), ) @@ -456,9 +469,9 @@ def test_reapply_feature_view_success(test_feature_store, dataframe_source): # Change and apply Feature View fv1 = FeatureView( name="my_feature_view_1", - features=[Feature(name="int64_col", 
dtype=ValueType.INT64)],
+            schema=[Field(name="int64_col", dtype=Int64)],
             entities=["id"],
-            input=file_source,
+            batch_source=file_source,
             ttl=timedelta(minutes=5),
         )
         test_feature_store.apply([fv1])
@@ -466,3 +479,39 @@
         # Check Feature View
         fv_stored = test_feature_store.get_feature_view(fv1.name)
         assert len(fv_stored.materialization_intervals) == 0
+
+    test_feature_store.teardown()
+
+
+def test_apply_conflicting_featureview_names(feature_store_with_local_registry):
+    """Test that applying feature views whose names are not case-insensitively unique fails"""
+
+    driver_stats = FeatureView(
+        name="driver_hourly_stats",
+        entities=["driver_id"],
+        ttl=timedelta(seconds=10),
+        online=False,
+        batch_source=FileSource(path="driver_stats.parquet"),
+        tags={},
+    )
+
+    customer_stats = FeatureView(
+        name="DRIVER_HOURLY_STATS",
+        entities=["id"],
+        ttl=timedelta(seconds=10),
+        online=False,
+        batch_source=FileSource(path="customer_stats.parquet"),
+        tags={},
+    )
+    try:
+        feature_store_with_local_registry.apply([driver_stats, customer_stats])
+        error = None
+    except ValueError as e:
+        error = e
+    assert (
+        isinstance(error, ValueError)
+        and "Please ensure that all feature view names are case-insensitively unique"
+        in error.args[0]
+    )
+
+    feature_store_with_local_registry.teardown()
diff --git a/sdk/python/tests/integration/registration/test_inference.py b/sdk/python/tests/integration/registration/test_inference.py
index cff5f33f74..6305468c09 100644
--- a/sdk/python/tests/integration/registration/test_inference.py
+++ b/sdk/python/tests/integration/registration/test_inference.py
@@ -1,33 +1,60 @@
+from copy import deepcopy
+
+import pandas as pd
 import pytest
-from utils.data_source_utils import (
-    prep_file_source,
-    simple_bq_source_using_query_arg,
-    simple_bq_source_using_table_ref_arg,
-)
-
-from feast import Entity, RepoConfig, ValueType
-from feast.errors import RegistryInferenceFailure
+from feast import (
+    BigQuerySource,
+    Entity,
+    Feature,
+    FileSource,
+    RedshiftSource,
+    RepoConfig,
+    SnowflakeSource,
+    ValueType,
+)
+from feast.data_source import RequestSource
+from feast.errors import (
+    DataSourceNoNameException,
+    RegistryInferenceFailure,
+    SpecifiedFeaturesNotPresentError,
+)
 from feast.feature_view import FeatureView
+from feast.field import Field
 from feast.inference import (
     update_data_sources_with_inferred_event_timestamp_col,
     update_entities_with_inferred_types_from_feature_views,
 )
+from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import (
+    SparkSource,
+)
+from feast.on_demand_feature_view import on_demand_feature_view
+from feast.types import PrimitiveFeastType, String, UnixTimestamp
+from tests.utils.data_source_utils import (
+    prep_file_source,
+    simple_bq_source_using_query_arg,
+    simple_bq_source_using_table_arg,
+)


 def test_update_entities_with_inferred_types_from_feature_views(
     simple_dataset_1, simple_dataset_2
 ):
     with prep_file_source(
-        df=simple_dataset_1, event_timestamp_column="ts_1"
+        df=simple_dataset_1, timestamp_field="ts_1"
     ) as file_source, prep_file_source(
-        df=simple_dataset_2, event_timestamp_column="ts_1"
+        df=simple_dataset_2, timestamp_field="ts_1"
     ) as file_source_2:
-        fv1 = FeatureView(name="fv1", entities=["id"], input=file_source, ttl=None,)
-        fv2 = FeatureView(name="fv2", entities=["id"], input=file_source_2, ttl=None,)
+        fv1 = FeatureView(
+            name="fv1", entities=["id"], batch_source=file_source, ttl=None,
+        )
+        fv2 = FeatureView(
entities=["id"], batch_source=file_source_2, ttl=None, + ) - actual_1 = Entity(name="id") - actual_2 = Entity(name="id") + actual_1 = Entity(name="id", join_keys=["id_join_key"]) + actual_2 = Entity(name="id", join_keys=["id_join_key"]) update_entities_with_inferred_types_from_feature_views( [actual_1], [fv1], RepoConfig(provider="local", project="test") @@ -35,41 +62,226 @@ def test_update_entities_with_inferred_types_from_feature_views( update_entities_with_inferred_types_from_feature_views( [actual_2], [fv2], RepoConfig(provider="local", project="test") ) - assert actual_1 == Entity(name="id", value_type=ValueType.INT64) - assert actual_2 == Entity(name="id", value_type=ValueType.STRING) + assert actual_1 == Entity( + name="id", join_keys=["id_join_key"], value_type=ValueType.INT64 + ) + assert actual_2 == Entity( + name="id", join_keys=["id_join_key"], value_type=ValueType.STRING + ) with pytest.raises(RegistryInferenceFailure): # two viable data types update_entities_with_inferred_types_from_feature_views( - [Entity(name="id")], + [Entity(name="id", join_keys=["id_join_key"])], [fv1, fv2], RepoConfig(provider="local", project="test"), ) +def test_infer_datasource_names_file(): + file_path = "path/to/test.csv" + data_source = FileSource(path=file_path) + assert data_source.name == file_path + + source_name = "my_name" + data_source = FileSource(name=source_name, path=file_path) + assert data_source.name == source_name + + +def test_infer_datasource_names_dwh(): + table = "project.table" + dwh_classes = [BigQuerySource, RedshiftSource, SnowflakeSource, SparkSource] + + for dwh_class in dwh_classes: + data_source = dwh_class(table=table) + assert data_source.name == table + + source_name = "my_name" + data_source_with_table = dwh_class(name=source_name, table=table) + assert data_source_with_table.name == source_name + data_source_with_query = dwh_class( + name=source_name, query=f"SELECT * from {table}" + ) + assert data_source_with_query.name == source_name + + # If we have a query and no name, throw an error + if dwh_class == SparkSource: + with pytest.raises(DataSourceNoNameException): + print(f"Testing dwh {dwh_class}") + data_source = dwh_class(query="test_query") + else: + data_source = dwh_class(query="test_query") + assert data_source.name == "" + + @pytest.mark.integration -def test_update_data_sources_with_inferred_event_timestamp_col(simple_dataset_1): +def test_update_file_data_source_with_inferred_event_timestamp_col(simple_dataset_1): df_with_two_viable_timestamp_cols = simple_dataset_1.copy(deep=True) df_with_two_viable_timestamp_cols["ts_2"] = simple_dataset_1["ts_1"] with prep_file_source(df=simple_dataset_1) as file_source: data_sources = [ file_source, - simple_bq_source_using_table_ref_arg(simple_dataset_1), + simple_bq_source_using_table_arg(simple_dataset_1), simple_bq_source_using_query_arg(simple_dataset_1), ] update_data_sources_with_inferred_event_timestamp_col( data_sources, RepoConfig(provider="local", project="test") ) actual_event_timestamp_cols = [ - source.event_timestamp_column for source in data_sources + source.timestamp_field for source in data_sources ] assert actual_event_timestamp_cols == ["ts_1", "ts_1", "ts_1"] with prep_file_source(df=df_with_two_viable_timestamp_cols) as file_source: with pytest.raises(RegistryInferenceFailure): - # two viable event_timestamp_columns + # two viable timestamp_fields update_data_sources_with_inferred_event_timestamp_col( [file_source], RepoConfig(provider="local", project="test") ) + + +@pytest.mark.integration 
+@pytest.mark.universal +def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_sources): + (_, _, data_sources) = universal_data_sources + data_sources_copy = deepcopy(data_sources) + + # remove defined timestamp_field to allow for inference + for data_source in data_sources_copy.values(): + data_source.timestamp_field = None + data_source.event_timestamp_column = None + + update_data_sources_with_inferred_event_timestamp_col( + data_sources_copy.values(), RepoConfig(provider="local", project="test"), + ) + actual_event_timestamp_cols = [ + source.timestamp_field for source in data_sources_copy.values() + ] + + assert actual_event_timestamp_cols == ["event_timestamp"] * len( + data_sources_copy.values() + ) + + +def test_on_demand_features_type_inference(): + # Create Feature Views + date_request = RequestSource( + name="date_request", + schema=[Field(name="some_date", dtype=PrimitiveFeastType.UNIX_TIMESTAMP)], + ) + + @on_demand_feature_view( + sources={"date_request": date_request}, + features=[ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="string_output", dtype=ValueType.STRING), + ], + ) + def test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + test_view.infer_features() + + @on_demand_feature_view( + # Note: we deliberately use `inputs` instead of `sources` to test that `inputs` + # still works correctly, even though it is deprecated. + # TODO(felixwang9817): Remove references to `inputs` once it is fully deprecated. + inputs={"date_request": date_request}, + features=[ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="object_output", dtype=ValueType.STRING), + ], + ) + def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["object_output"] = features_df["some_date"].astype(str) + return data + + with pytest.raises(ValueError, match="Value with native type object"): + invalid_test_view.infer_features() + + @on_demand_feature_view( + # Note: we deliberately use positional arguments here to test that they work correctly, + # even though positional arguments are deprecated in favor of keyword arguments. + # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. + [ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="missing", dtype=ValueType.STRING), + ], + {"date_request": date_request}, + ) + def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + return data + + with pytest.raises(SpecifiedFeaturesNotPresentError): + test_view_with_missing_feature.infer_features() + + +# TODO(kevjumba): remove this in feast 0.23 when deprecating +@pytest.mark.parametrize( + "request_source_schema", + [ + [Field(name="some_date", dtype=UnixTimestamp)], + {"some_date": ValueType.UNIX_TIMESTAMP}, + ], +) +def test_datasource_inference(request_source_schema): + # Create Feature Views + date_request = RequestSource(name="date_request", schema=request_source_schema,) + + @on_demand_feature_view( + # Note: we deliberately use positional arguments here to test that they work correctly, + # even though positional arguments are deprecated in favor of keyword arguments. + # TODO(felixwang9817): Remove positional arguments once they are fully deprecated. 
+ [ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="string_output", dtype=ValueType.STRING), + ], + sources={"date_request": date_request}, + ) + def test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + test_view.infer_features() + + @on_demand_feature_view( + sources={"date_request": date_request}, + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="object_output", dtype=String), + ], + ) + def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["object_output"] = features_df["some_date"].astype(str) + return data + + with pytest.raises(ValueError, match="Value with native type object"): + invalid_test_view.infer_features() + + @on_demand_feature_view( + sources={"date_request": date_request}, + features=[ + Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), + Feature(name="missing", dtype=ValueType.STRING), + ], + ) + def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + return data + + with pytest.raises(SpecifiedFeaturesNotPresentError): + test_view_with_missing_feature.infer_features() diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 82b0f51038..5f72fb7125 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -15,6 +15,7 @@ from datetime import timedelta from tempfile import mkstemp +import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture @@ -23,19 +24,24 @@ from feast.entity import Entity from feast.feature import Feature from feast.feature_view import FeatureView +from feast.field import Field +from feast.on_demand_feature_view import RequestSource, on_demand_feature_view from feast.protos.feast.types import Value_pb2 as ValueProto from feast.registry import Registry +from feast.repo_config import RegistryConfig +from feast.types import Array, Bytes, Float32, Int32, Int64, String from feast.value_type import ValueType @pytest.fixture -def local_registry(): +def local_registry() -> Registry: fd, registry_path = mkstemp() - return Registry(registry_path, None, timedelta(600)) + registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) + return Registry(registry_config, None) @pytest.fixture -def gcs_registry(): +def gcs_registry() -> Registry: from google.cloud import storage storage_client = storage.Client() @@ -47,16 +53,19 @@ def gcs_registry(): ) # delete buckets automatically after 14 days bucket.patch() bucket.blob("registry.db") - return Registry(f"gs://{bucket_name}/registry.db", None, timedelta(600)) + registry_config = RegistryConfig( + path=f"gs://{bucket_name}/registry.db", cache_ttl_seconds=600 + ) + return Registry(registry_config, None) @pytest.fixture -def s3_registry(): - return Registry( - f"s3://feast-integration-tests/registries/{int(time.time() * 1000)}/registry.db", - None, - timedelta(600), +def s3_registry() -> Registry: + registry_config = RegistryConfig( + path=f"s3://feast-integration-tests/registries/{int(time.time() * 1000)}/registry.db", + cache_ttl_seconds=600, ) + return Registry(registry_config, None) @pytest.mark.parametrize( @@ -67,7 +76,7 @@ def 
test_apply_entity_success(test_registry): name="driver_car_id", description="Car driver id", value_type=ValueType.STRING, - labels={"team": "matchmaking"}, + tags={"team": "matchmaking"}, ) project = "project" @@ -83,8 +92,8 @@ def test_apply_entity_success(test_registry): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) entity = test_registry.get_entity("driver_car_id", project) @@ -92,10 +101,20 @@ def test_apply_entity_success(test_registry): entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) + test_registry.delete_entity("driver_car_id", project) + entities = test_registry.list_entities(project) + assert len(entities) == 0 + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + @pytest.mark.integration @pytest.mark.parametrize( @@ -106,7 +125,7 @@ def test_apply_entity_integration(test_registry): name="driver_car_id", description="Car driver id", value_type=ValueType.STRING, - labels={"team": "matchmaking"}, + tags={"team": "matchmaking"}, ) project = "project" @@ -122,8 +141,8 @@ def test_apply_entity_integration(test_registry): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) entity = test_registry.get_entity("driver_car_id", project) @@ -131,10 +150,16 @@ def test_apply_entity_integration(test_registry): entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + @pytest.mark.parametrize( "test_registry", [lazy_fixture("local_registry")], @@ -143,23 +168,22 @@ def test_apply_feature_view_success(test_registry): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), - file_url="file://feast/*", - event_timestamp_column="ts_col", + path="file://feast/*", + timestamp_field="ts_col", created_timestamp_column="timestamp", - date_partition_column="date_partition_col", ) fv1 = FeatureView( name="my_feature_view_1", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], 
tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) @@ -175,13 +199,13 @@ def test_apply_feature_view_success(test_registry): len(feature_views) == 1 and feature_views[0].name == "my_feature_view_1" and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == ValueType.INT64 + and feature_views[0].features[0].dtype == Int64 and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == ValueType.STRING + and feature_views[0].features[1].dtype == String and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == ValueType.STRING_LIST + and feature_views[0].features[2].dtype == Array(String) and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == ValueType.BYTES_LIST + and feature_views[0].features[3].dtype == Array(Bytes) and feature_views[0].entities[0] == "fs1_my_entity_1" ) @@ -189,13 +213,13 @@ def test_apply_feature_view_success(test_registry): assert ( feature_view.name == "my_feature_view_1" and feature_view.features[0].name == "fs1_my_feature_1" - and feature_view.features[0].dtype == ValueType.INT64 + and feature_view.features[0].dtype == Int64 and feature_view.features[1].name == "fs1_my_feature_2" - and feature_view.features[1].dtype == ValueType.STRING + and feature_view.features[1].dtype == String and feature_view.features[2].name == "fs1_my_feature_3" - and feature_view.features[2].dtype == ValueType.STRING_LIST + and feature_view.features[2].dtype == Array(String) and feature_view.features[3].name == "fs1_my_feature_4" - and feature_view.features[3].dtype == ValueType.BYTES_LIST + and feature_view.features[3].dtype == Array(Bytes) and feature_view.entities[0] == "fs1_my_entity_1" ) @@ -203,6 +227,134 @@ def test_apply_feature_view_success(test_registry): feature_views = test_registry.list_feature_views(project) assert len(feature_views) == 0 + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + + +@pytest.mark.parametrize( + "test_registry", [lazy_fixture("local_registry")], +) +# TODO(kevjumba): remove this in feast 0.23 when deprecating +@pytest.mark.parametrize( + "request_source_schema", + [[Field(name="my_input_1", dtype=Int32)], {"my_input_1": ValueType.INT32}], +) +def test_modify_feature_views_success(test_registry, request_source_schema): + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + request_source = RequestSource(name="request_source", schema=request_source_schema,) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[Field(name="fs1_my_feature_1", dtype=Int64)], + entities=["fs1_my_entity_1"], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + @on_demand_feature_view( + features=[ + Feature(name="odfv1_my_feature_1", dtype=ValueType.STRING), + Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), + ], + sources={"request_source": request_source}, + ) + def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("category") + data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") + return data + + project = 
"project" + + # Register Feature Views + test_registry.apply_feature_view(odfv1, project) + test_registry.apply_feature_view(fv1, project) + + # Modify odfv by changing a single feature dtype + @on_demand_feature_view( + features=[ + Feature(name="odfv1_my_feature_1", dtype=ValueType.FLOAT), + Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), + ], + sources={"request_source": request_source}, + ) + def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["odfv1_my_feature_1"] = feature_df["my_input_1"].astype("float") + data["odfv1_my_feature_2"] = feature_df["my_input_1"].astype("int32") + return data + + # Apply the modified odfv + test_registry.apply_feature_view(odfv1, project) + + # Check odfv + on_demand_feature_views = test_registry.list_on_demand_feature_views(project) + + assert ( + len(on_demand_feature_views) == 1 + and on_demand_feature_views[0].name == "odfv1" + and on_demand_feature_views[0].features[0].name == "odfv1_my_feature_1" + and on_demand_feature_views[0].features[0].dtype == Float32 + and on_demand_feature_views[0].features[1].name == "odfv1_my_feature_2" + and on_demand_feature_views[0].features[1].dtype == Int32 + ) + request_schema = on_demand_feature_views[0].get_request_data_schema() + assert ( + list(request_schema.keys())[0] == "my_input_1" + and list(request_schema.values())[0] == ValueType.INT32 + ) + + feature_view = test_registry.get_on_demand_feature_view("odfv1", project) + assert ( + feature_view.name == "odfv1" + and feature_view.features[0].name == "odfv1_my_feature_1" + and feature_view.features[0].dtype == Float32 + and feature_view.features[1].name == "odfv1_my_feature_2" + and feature_view.features[1].dtype == Int32 + ) + request_schema = feature_view.get_request_data_schema() + assert ( + list(request_schema.keys())[0] == "my_input_1" + and list(request_schema.values())[0] == ValueType.INT32 + ) + + # Make sure fv1 is untouched + feature_views = test_registry.list_feature_views(project) + + # List Feature Views + assert ( + len(feature_views) == 1 + and feature_views[0].name == "my_feature_view_1" + and feature_views[0].features[0].name == "fs1_my_feature_1" + and feature_views[0].features[0].dtype == Int64 + and feature_views[0].entities[0] == "fs1_my_entity_1" + ) + + feature_view = test_registry.get_feature_view("my_feature_view_1", project) + assert ( + feature_view.name == "my_feature_view_1" + and feature_view.features[0].name == "fs1_my_feature_1" + and feature_view.features[0].dtype == Int64 + and feature_view.entities[0] == "fs1_my_entity_1" + ) + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + @pytest.mark.integration @pytest.mark.parametrize( @@ -212,23 +364,22 @@ def test_apply_feature_view_integration(test_registry): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), - file_url="file://feast/*", - event_timestamp_column="ts_col", + path="file://feast/*", + timestamp_field="ts_col", created_timestamp_column="timestamp", - date_partition_column="date_partition_col", ) fv1 = FeatureView( name="my_feature_view_1", - features=[ - Feature(name="fs1_my_feature_1", dtype=ValueType.INT64), - Feature(name="fs1_my_feature_2", dtype=ValueType.STRING), - Feature(name="fs1_my_feature_3", dtype=ValueType.STRING_LIST), - Feature(name="fs1_my_feature_4", dtype=ValueType.BYTES_LIST), + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + 
Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), ], entities=["fs1_my_entity_1"], tags={"team": "matchmaking"}, - input=batch_source, + batch_source=batch_source, ttl=timedelta(minutes=5), ) @@ -244,13 +395,13 @@ def test_apply_feature_view_integration(test_registry): len(feature_views) == 1 and feature_views[0].name == "my_feature_view_1" and feature_views[0].features[0].name == "fs1_my_feature_1" - and feature_views[0].features[0].dtype == ValueType.INT64 + and feature_views[0].features[0].dtype == Int64 and feature_views[0].features[1].name == "fs1_my_feature_2" - and feature_views[0].features[1].dtype == ValueType.STRING + and feature_views[0].features[1].dtype == String and feature_views[0].features[2].name == "fs1_my_feature_3" - and feature_views[0].features[2].dtype == ValueType.STRING_LIST + and feature_views[0].features[2].dtype == Array(String) and feature_views[0].features[3].name == "fs1_my_feature_4" - and feature_views[0].features[3].dtype == ValueType.BYTES_LIST + and feature_views[0].features[3].dtype == Array(Bytes) and feature_views[0].entities[0] == "fs1_my_entity_1" ) @@ -258,13 +409,13 @@ def test_apply_feature_view_integration(test_registry): assert ( feature_view.name == "my_feature_view_1" and feature_view.features[0].name == "fs1_my_feature_1" - and feature_view.features[0].dtype == ValueType.INT64 + and feature_view.features[0].dtype == Int64 and feature_view.features[1].name == "fs1_my_feature_2" - and feature_view.features[1].dtype == ValueType.STRING + and feature_view.features[1].dtype == String and feature_view.features[2].name == "fs1_my_feature_3" - and feature_view.features[2].dtype == ValueType.STRING_LIST + and feature_view.features[2].dtype == Array(String) and feature_view.features[3].name == "fs1_my_feature_4" - and feature_view.features[3].dtype == ValueType.BYTES_LIST + and feature_view.features[3].dtype == Array(Bytes) and feature_view.entities[0] == "fs1_my_entity_1" ) @@ -272,16 +423,86 @@ def test_apply_feature_view_integration(test_registry): feature_views = test_registry.list_feature_views(project) assert len(feature_views) == 0 + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + + +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")], +) +def test_apply_data_source(test_registry: Registry): + # Create Feature Views + batch_source = FileSource( + name="test_source", + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + fv1 = FeatureView( + name="my_feature_view_1", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + ], + entities=["fs1_my_entity_1"], + tags={"team": "matchmaking"}, + batch_source=batch_source, + ttl=timedelta(minutes=5), + ) + + project = "project" + + # Register data source and feature view + test_registry.apply_data_source(batch_source, project, commit=False) + test_registry.apply_feature_view(fv1, project, commit=True) + + registry_feature_views = test_registry.list_feature_views(project) + registry_data_sources = test_registry.list_data_sources(project) + assert 
len(registry_feature_views) == 1 + assert len(registry_data_sources) == 1 + registry_feature_view = registry_feature_views[0] + assert registry_feature_view.batch_source == batch_source + registry_data_source = registry_data_sources[0] + assert registry_data_source == batch_source + + # Check that change to batch source propagates + batch_source.timestamp_field = "new_ts_col" + test_registry.apply_data_source(batch_source, project, commit=False) + test_registry.apply_feature_view(fv1, project, commit=True) + registry_feature_views = test_registry.list_feature_views(project) + registry_data_sources = test_registry.list_data_sources(project) + assert len(registry_feature_views) == 1 + assert len(registry_data_sources) == 1 + registry_feature_view = registry_feature_views[0] + assert registry_feature_view.batch_source == batch_source + registry_batch_source = test_registry.list_data_sources(project)[0] + assert registry_batch_source == batch_source + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() + def test_commit(): fd, registry_path = mkstemp() - test_registry = Registry(registry_path, None, timedelta(600)) + registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600) + test_registry = Registry(registry_config, None) entity = Entity( name="driver_car_id", description="Car driver id", value_type=ValueType.STRING, - labels={"team": "matchmaking"}, + tags={"team": "matchmaking"}, ) project = "project" @@ -298,8 +519,8 @@ def test_commit(): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) @@ -307,12 +528,12 @@ def test_commit(): entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) # Create new registry that points to the same store - registry_with_same_store = Registry(registry_path, None, timedelta(600)) + registry_with_same_store = Registry(registry_config, None) # Retrieving the entity should fail since the store is empty entities = registry_with_same_store.list_entities(project) @@ -322,7 +543,7 @@ def test_commit(): test_registry.commit() # Reconstruct the new registry in order to read the newly written store - registry_with_same_store = Registry(registry_path, None, timedelta(600)) + registry_with_same_store = Registry(registry_config, None) # Retrieving the entity should now succeed entities = registry_with_same_store.list_entities(project) @@ -333,8 +554,8 @@ def test_commit(): and entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) entity = test_registry.get_entity("driver_car_id", project) @@ -342,6 +563,12 @@ def test_commit(): entity.name == "driver_car_id" and entity.value_type == ValueType(ValueProto.ValueType.STRING) and entity.description == "Car driver 
id" - and "team" in entity.labels - and entity.labels["team"] == "matchmaking" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" ) + + test_registry.teardown() + + # Will try to reload registry, which will fail because the file has been deleted + with pytest.raises(FileNotFoundError): + test_registry._get_registry_proto() diff --git a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py new file mode 100644 index 0000000000..9dced8f13a --- /dev/null +++ b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py @@ -0,0 +1,90 @@ +from datetime import datetime + +import pandas as pd +import pytest + +from feast import Field +from feast.errors import SpecifiedFeaturesNotPresentError +from feast.infra.offline_stores.file_source import FileSource +from feast.types import Float64 +from tests.integration.feature_repos.universal.entities import customer, driver, item +from tests.integration.feature_repos.universal.feature_views import ( + conv_rate_plus_100_feature_view, + create_conv_rate_request_source, + create_driver_hourly_stats_feature_view, + create_item_embeddings_feature_view, + create_similarity_request_source, + similarity_feature_view, +) + + +@pytest.mark.integration +@pytest.mark.universal +@pytest.mark.parametrize("infer_features", [True, False], ids=lambda v: str(v)) +def test_infer_odfv_features(environment, universal_data_sources, infer_features): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + + driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) + request_source = create_conv_rate_request_source() + driver_odfv = conv_rate_plus_100_feature_view( + {"driver": driver_hourly_stats, "input_request": request_source}, + infer_features=infer_features, + ) + + feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] + store.apply(feast_objects) + odfv = store.get_on_demand_feature_view("conv_rate_plus_100") + assert len(odfv.features) == 3 + + +@pytest.mark.integration +@pytest.mark.parametrize("infer_features", [True, False], ids=lambda v: str(v)) +def test_infer_odfv_list_features(environment, infer_features, tmp_path): + fake_embedding = [1.0, 1.0] + items_df = pd.DataFrame( + data={ + "item_id": [0], + "embedding_float": [fake_embedding], + "embedding_double": [fake_embedding], + "event_timestamp": [pd.Timestamp(datetime.utcnow())], + "created": [pd.Timestamp(datetime.utcnow())], + } + ) + output_path = f"{tmp_path}/items.parquet" + items_df.to_parquet(output_path) + fake_items_src = FileSource( + path=output_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + items = create_item_embeddings_feature_view(fake_items_src) + sim_odfv = similarity_feature_view( + {"items": items, "input_request": create_similarity_request_source()}, + infer_features=infer_features, + ) + store = environment.feature_store + store.apply([item(), items, sim_odfv]) + odfv = store.get_on_demand_feature_view("similarity") + assert len(odfv.features) == 2 + + +@pytest.mark.integration +@pytest.mark.universal +def test_infer_odfv_features_with_error(environment, universal_data_sources): + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + + features = [Field(name="conv_rate_plus_200", dtype=Float64)] + driver_hourly_stats = 
create_driver_hourly_stats_feature_view(data_sources.driver) + request_source = create_conv_rate_request_source() + driver_odfv = conv_rate_plus_100_feature_view( + {"driver": driver_hourly_stats, "input_request": request_source}, + features=features, + ) + + feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] + with pytest.raises(SpecifiedFeaturesNotPresentError): + store.apply(feast_objects) diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py new file mode 100644 index 0000000000..81fa0200fd --- /dev/null +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -0,0 +1,376 @@ +import logging +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Any, Dict, List, Tuple, Union + +import numpy as np +import pandas as pd +import pyarrow as pa +import pytest + +from feast.infra.offline_stores.offline_store import RetrievalJob +from feast.types import Array, Bool, Float32, Int32, Int64, UnixTimestamp +from feast.value_type import ValueType +from tests.data.data_creator import create_dataset +from tests.integration.feature_repos.repo_configuration import ( + FULL_REPO_CONFIGS, + REDIS_CONFIG, + IntegrationTestRepoConfig, + construct_test_environment, +) +from tests.integration.feature_repos.universal.entities import driver +from tests.integration.feature_repos.universal.feature_views import driver_feature_view + +logger = logging.getLogger(__name__) + + +def populate_test_configs(offline: bool): + entity_type_feature_dtypes = [ + (ValueType.INT32, "int32"), + (ValueType.INT64, "int64"), + (ValueType.STRING, "float"), + (ValueType.STRING, "bool"), + (ValueType.INT32, "datetime"), + ] + configs: List[TypeTestConfig] = [] + for test_repo_config in FULL_REPO_CONFIGS: + for entity_type, feature_dtype in entity_type_feature_dtypes: + for feature_is_list in [True, False]: + # Redshift doesn't support list features + if test_repo_config.provider == "aws" and feature_is_list is True: + continue + # For offline tests, don't need to vary for online store + if offline and test_repo_config.online_store == REDIS_CONFIG: + continue + for has_empty_list in [True, False]: + # For non list features `has_empty_list` does nothing + if feature_is_list is False and has_empty_list is True: + continue + configs.append( + TypeTestConfig( + entity_type=entity_type, + feature_dtype=feature_dtype, + feature_is_list=feature_is_list, + has_empty_list=has_empty_list, + test_repo_config=test_repo_config, + ) + ) + return configs + + +@dataclass(frozen=True, repr=True) +class TypeTestConfig: + entity_type: ValueType + feature_dtype: str + feature_is_list: bool + has_empty_list: bool + test_repo_config: IntegrationTestRepoConfig + + +OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=True) +ONLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=False) + + +@pytest.fixture( + params=OFFLINE_TYPE_TEST_CONFIGS, + scope="session", + ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], +) +def offline_types_test_fixtures(request): + return get_fixtures(request) + + +@pytest.fixture( + params=ONLINE_TYPE_TEST_CONFIGS, + scope="session", + ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], +) +def online_types_test_fixtures(request): + return get_fixtures(request) + + +def get_fixtures(request): + config: TypeTestConfig = request.param + # Lower case needed because Redshift lower-cases all table names + 
test_project_id = f"{config.entity_type}{config.feature_dtype}{config.feature_is_list}".replace( + ".", "" + ).lower() + type_test_environment = construct_test_environment( + test_repo_config=config.test_repo_config, + test_suite_name=f"test_{test_project_id}", + ) + config = request.param + df = create_dataset( + config.entity_type, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + ) + data_source = type_test_environment.data_source_creator.create_data_source( + df, + destination_name=type_test_environment.feature_store.project, + field_mapping={"ts_1": "ts"}, + ) + fv = create_feature_view( + request.fixturename, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + data_source, + ) + + def cleanup(): + try: + type_test_environment.data_source_creator.teardown() + except Exception: # noqa + logger.exception("DataSourceCreator teardown has failed") + + type_test_environment.feature_store.teardown() + + request.addfinalizer(cleanup) + + return type_test_environment, config, data_source, fv + + +@pytest.mark.integration +@pytest.mark.universal +def test_entity_inference_types_match(offline_types_test_fixtures): + environment, config, data_source, fv = offline_types_test_fixtures + fs = environment.feature_store + + # Don't specify value type in entity to force inference + entity = driver(value_type=ValueType.UNKNOWN) + fs.apply([fv, entity]) + + entities = fs.list_entities() + entity_type_to_expected_inferred_entity_type = { + ValueType.INT32: ValueType.INT64, + ValueType.INT64: ValueType.INT64, + ValueType.FLOAT: ValueType.DOUBLE, + ValueType.STRING: ValueType.STRING, + } + for entity in entities: + assert ( + entity.value_type + == entity_type_to_expected_inferred_entity_type[config.entity_type] + ) + + +@pytest.mark.integration +@pytest.mark.universal +def test_feature_get_historical_features_types_match(offline_types_test_fixtures): + """ + Note: to make sure this test works, we need to ensure that get_historical_features + returns at least one non-null row to make sure type inferral works. This can only + be achieved by carefully matching entity_df to the data fixtures. + """ + environment, config, data_source, fv = offline_types_test_fixtures + fs = environment.feature_store + entity = driver() + fv = create_feature_view( + "get_historical_features_types_match", + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + data_source, + ) + fs.apply([fv, entity]) + + entity_df = pd.DataFrame() + entity_df["driver_id"] = ( + ["1", "3"] if config.entity_type == ValueType.STRING else [1, 3] + ) + ts = pd.Timestamp(datetime.utcnow()).round("ms") + entity_df["ts"] = [ + ts - timedelta(hours=4), + ts - timedelta(hours=2), + ] + features = [f"{fv.name}:value"] + + historical_features = fs.get_historical_features( + entity_df=entity_df, features=features, + ) + # Note: Pandas doesn't play well with nan values in ints. 
BQ will also coerce to floats if there are NaNs + historical_features_df = historical_features.to_df() + print(historical_features_df) + + if config.feature_is_list: + assert_feature_list_types( + environment.test_repo_config.provider, + config.feature_dtype, + historical_features_df, + ) + else: + assert_expected_historical_feature_types( + config.feature_dtype, historical_features_df + ) + assert_expected_arrow_types( + environment.test_repo_config.provider, + config.feature_dtype, + config.feature_is_list, + historical_features, + ) + + +@pytest.mark.integration +@pytest.mark.universal +def test_feature_get_online_features_types_match(online_types_test_fixtures): + environment, config, data_source, fv = online_types_test_fixtures + fv = create_feature_view( + "get_online_features_types_match", + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + data_source, + ) + fs = environment.feature_store + features = [fv.name + ":value"] + entity = driver(value_type=config.entity_type) + fs.apply([fv, entity]) + fs.materialize( + environment.start_date, + environment.end_date + - timedelta(hours=1) # throwing out last record to make sure + # we can successfully infer type even from all empty values + ) + + driver_id_value = "1" if config.entity_type == ValueType.STRING else 1 + online_features = fs.get_online_features( + features=features, entity_rows=[{"driver_id": driver_id_value}], + ).to_dict() + + feature_list_dtype_to_expected_online_response_value_type = { + "int32": int, + "int64": int, + "float": float, + "string": str, + "bool": bool, + "datetime": datetime, + } + expected_dtype = feature_list_dtype_to_expected_online_response_value_type[ + config.feature_dtype + ] + + assert len(online_features["value"]) == 1 + + if config.feature_is_list: + for feature in online_features["value"]: + assert isinstance(feature, list), "Feature value should be a list" + assert ( + config.has_empty_list or len(feature) > 0 + ), "List of values should not be empty" + for element in feature: + assert isinstance(element, expected_dtype) + else: + for feature in online_features["value"]: + assert isinstance(feature, expected_dtype) + + +def create_feature_view( + name, feature_dtype, feature_is_list, has_empty_list, data_source +): + if feature_is_list is True: + if feature_dtype == "int32": + dtype = Array(Int32) + elif feature_dtype == "int64": + dtype = Array(Int64) + elif feature_dtype == "float": + dtype = Array(Float32) + elif feature_dtype == "bool": + dtype = Array(Bool) + elif feature_dtype == "datetime": + dtype = Array(UnixTimestamp) + else: + if feature_dtype == "int32": + dtype = Int32 + elif feature_dtype == "int64": + dtype = Int64 + elif feature_dtype == "float": + dtype = Float32 + elif feature_dtype == "bool": + dtype = Bool + elif feature_dtype == "datetime": + dtype = UnixTimestamp + + return driver_feature_view(data_source, name=name, dtype=dtype,) + + +def assert_expected_historical_feature_types( + feature_dtype: str, historical_features_df: pd.DataFrame +): + print("Asserting historical feature types") + feature_dtype_to_expected_historical_feature_dtype = { + "int32": (pd.api.types.is_integer_dtype,), + "int64": (pd.api.types.is_int64_dtype,), + "float": (pd.api.types.is_float_dtype,), + "string": (pd.api.types.is_string_dtype,), + "bool": (pd.api.types.is_bool_dtype, pd.api.types.is_object_dtype), + "datetime": (pd.api.types.is_datetime64_any_dtype,), + } + dtype_checkers = feature_dtype_to_expected_historical_feature_dtype[feature_dtype] + assert any( + 
check(historical_features_df.dtypes["value"]) for check in dtype_checkers + ) + + +def assert_feature_list_types( + provider: str, feature_dtype: str, historical_features_df: pd.DataFrame +): + print("Asserting historical feature list types") + feature_list_dtype_to_expected_historical_feature_list_dtype: Dict[ + str, Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]] + ] = { + "int32": ( + int, + np.int64, + ), # Can be `np.int64` if from `np.array` rather than `list` + "int64": ( + int, + np.int64, + ), # Can be `np.int64` if from `np.array` rather than `list` + "float": float, + "string": str, + "bool": ( + bool, + np.bool_, + ), # Can be `np.bool_` if from `np.array` rather than `list` + "datetime": (np.datetime64, datetime,), # datetime.datetime + } + expected_dtype = feature_list_dtype_to_expected_historical_feature_list_dtype[ + feature_dtype + ] + assert pd.api.types.is_object_dtype(historical_features_df.dtypes["value"]) + for feature in historical_features_df.value: + assert isinstance(feature, (np.ndarray, list)) + for element in feature: + assert isinstance(element, expected_dtype) + + +def assert_expected_arrow_types( + provider: str, + feature_dtype: str, + feature_is_list: bool, + historical_features: RetrievalJob, +): + print("Asserting historical feature arrow types") + historical_features_arrow = historical_features.to_arrow() + print(historical_features_arrow) + feature_list_dtype_to_expected_historical_feature_arrow_type = { + "int32": pa.types.is_int64, + "int64": pa.types.is_int64, + "float": pa.types.is_float64, + "string": pa.types.is_string, + "bool": pa.types.is_boolean, + "date": pa.types.is_date, + "datetime": pa.types.is_timestamp, + } + arrow_type_checker = feature_list_dtype_to_expected_historical_feature_arrow_type[ + feature_dtype + ] + pa_type = historical_features_arrow.schema.field("value").type + + if feature_is_list: + assert pa.types.is_list(pa_type) + assert arrow_type_checker(pa_type.value_type) + else: + assert arrow_type_checker(pa_type) diff --git a/sdk/python/tests/integration/scaffolding/test_partial_apply.py b/sdk/python/tests/integration/scaffolding/test_partial_apply.py index cd8be2fe23..3ab9bf196f 100644 --- a/sdk/python/tests/integration/scaffolding/test_partial_apply.py +++ b/sdk/python/tests/integration/scaffolding/test_partial_apply.py @@ -1,7 +1,9 @@ +from datetime import timedelta + import pytest -from google.protobuf.duration_pb2 import Duration -from feast import BigQuerySource, Feature, FeatureView, ValueType +from feast import BigQuerySource, FeatureView, Field +from feast.types import Float32, String from tests.utils.cli_utils import CliRunner, get_example_repo from tests.utils.online_read_write_test import basic_rw_test @@ -19,22 +21,22 @@ def test_partial() -> None: ) as store: driver_locations_source = BigQuerySource( - table_ref="feast-oss.public.drivers", - event_timestamp_column="event_timestamp", + table="feast-oss.public.drivers", + timestamp_field="event_timestamp", created_timestamp_column="created_timestamp", ) driver_locations_100 = FeatureView( name="driver_locations_100", entities=["driver"], - ttl=Duration(seconds=86400 * 1), - features=[ - Feature(name="lat", dtype=ValueType.FLOAT), - Feature(name="lon", dtype=ValueType.STRING), - Feature(name="name", dtype=ValueType.STRING), + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="name", dtype=String), ], online=True, - input=driver_locations_source, + batch_source=driver_locations_source, 
tags={}, ) diff --git a/sdk/python/tests/integration/scaffolding/test_repo_config.py b/sdk/python/tests/integration/scaffolding/test_repo_config.py index f4e15d497f..3ec91c0044 100644 --- a/sdk/python/tests/integration/scaffolding/test_repo_config.py +++ b/sdk/python/tests/integration/scaffolding/test_repo_config.py @@ -34,6 +34,49 @@ def _test_config(config_text, expect_error: Optional[str]): return rc +def test_nullable_online_store_aws(): + _test_config( + dedent( + """ + project: foo + registry: "registry.db" + provider: aws + online_store: null + """ + ), + expect_error="__root__ -> offline_store -> cluster_id\n" + " field required (type=value_error.missing)", + ) + + +def test_nullable_online_store_gcp(): + _test_config( + dedent( + """ + project: foo + registry: "registry.db" + provider: gcp + online_store: null + """ + ), + expect_error=None, + ) + + +def test_nullable_online_store_local(): + _test_config( + dedent( + """ + project: foo + registry: "registry.db" + provider: local + online_store: null + """ + ), + expect_error=None, + ) + + def test_local_config(): _test_config( dedent( @@ -153,3 +196,27 @@ def test_no_project(): "project\n" " field required (type=value_error.missing)", ) + + +def test_invalid_project_name(): + _test_config( + dedent( + """ + project: foo-1 + registry: "registry.db" + provider: local + """ + ), + expect_error="alphanumerical values ", + ) + + _test_config( + dedent( + """ + project: _foo + registry: "registry.db" + provider: local + """ + ), + expect_error="alphanumerical values ", + ) diff --git a/sdk/python/tests/unit/diff/test_infra_diff.py b/sdk/python/tests/unit/diff/test_infra_diff.py new file mode 100644 index 0000000000..8e3d5b765f --- /dev/null +++ b/sdk/python/tests/unit/diff/test_infra_diff.py @@ -0,0 +1,154 @@ +from google.protobuf import wrappers_pb2 as wrappers + +from feast.diff.infra_diff import ( + diff_between, + diff_infra_protos, + tag_infra_proto_objects_for_keep_delete_add, +) +from feast.diff.property_diff import TransitionType +from feast.infra.online_stores.datastore import DatastoreTable +from feast.infra.online_stores.dynamodb import DynamoDBTable +from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto + + +def test_tag_infra_proto_objects_for_keep_delete_add(): + to_delete = DynamoDBTable(name="to_delete", region="us-west-2").to_proto() + to_add = DynamoDBTable(name="to_add", region="us-west-2").to_proto() + unchanged_table = DynamoDBTable(name="unchanged", region="us-west-2").to_proto() + pre_changed = DynamoDBTable(name="table", region="us-west-2").to_proto() + post_changed = DynamoDBTable(name="table", region="us-east-2").to_proto() + + keep, delete, add = tag_infra_proto_objects_for_keep_delete_add( + [to_delete, unchanged_table, pre_changed], + [to_add, unchanged_table, post_changed], + ) + + assert len(list(keep)) == 2 + assert unchanged_table in keep + assert post_changed in keep + assert to_add not in keep + assert len(list(delete)) == 1 + assert to_delete in delete + assert unchanged_table not in delete + assert pre_changed not in delete + assert len(list(add)) == 1 + assert to_add in add + assert unchanged_table not in add + assert post_changed not in add + + +def test_diff_between_datastore_tables(): + pre_changed = DatastoreTable( + project="test", name="table", project_id="pre", namespace="pre" + ).to_proto() + post_changed = DatastoreTable( + project="test", name="table", project_id="post", namespace="post" + ).to_proto() + + infra_object_diff = diff_between(pre_changed, pre_changed, 
"datastore table") + infra_object_property_diffs = infra_object_diff.infra_object_property_diffs + assert len(infra_object_property_diffs) == 0 + + infra_object_diff = diff_between(pre_changed, post_changed, "datastore table") + infra_object_property_diffs = infra_object_diff.infra_object_property_diffs + assert len(infra_object_property_diffs) == 2 + + assert infra_object_property_diffs[0].property_name == "project_id" + assert infra_object_property_diffs[0].val_existing == wrappers.StringValue( + value="pre" + ) + assert infra_object_property_diffs[0].val_declared == wrappers.StringValue( + value="post" + ) + assert infra_object_property_diffs[1].property_name == "namespace" + assert infra_object_property_diffs[1].val_existing == wrappers.StringValue( + value="pre" + ) + assert infra_object_property_diffs[1].val_declared == wrappers.StringValue( + value="post" + ) + + +def test_diff_infra_protos(): + to_delete = DynamoDBTable(name="to_delete", region="us-west-2") + to_add = DynamoDBTable(name="to_add", region="us-west-2") + unchanged_table = DynamoDBTable(name="unchanged", region="us-west-2") + pre_changed = DatastoreTable( + project="test", name="table", project_id="pre", namespace="pre" + ) + post_changed = DatastoreTable( + project="test", name="table", project_id="post", namespace="post" + ) + + infra_objects_before = [to_delete, unchanged_table, pre_changed] + infra_objects_after = [to_add, unchanged_table, post_changed] + + infra_proto_before = InfraProto() + infra_proto_before.infra_objects.extend( + [obj.to_infra_object_proto() for obj in infra_objects_before] + ) + + infra_proto_after = InfraProto() + infra_proto_after.infra_objects.extend( + [obj.to_infra_object_proto() for obj in infra_objects_after] + ) + + infra_diff = diff_infra_protos(infra_proto_before, infra_proto_after) + infra_object_diffs = infra_diff.infra_object_diffs + + # There should be one addition, one deletion, one unchanged, and one changed. 
+ assert len(infra_object_diffs) == 4 + + additions = [ + infra_object_diff + for infra_object_diff in infra_object_diffs + if infra_object_diff.transition_type == TransitionType.CREATE + ] + assert len(additions) == 1 + assert not additions[0].current_infra_object + assert additions[0].new_infra_object == to_add.to_proto() + assert len(additions[0].infra_object_property_diffs) == 0 + + deletions = [ + infra_object_diff + for infra_object_diff in infra_object_diffs + if infra_object_diff.transition_type == TransitionType.DELETE + ] + assert len(deletions) == 1 + assert deletions[0].current_infra_object == to_delete.to_proto() + assert not deletions[0].new_infra_object + assert len(deletions[0].infra_object_property_diffs) == 0 + + unchanged = [ + infra_object_diff + for infra_object_diff in infra_object_diffs + if infra_object_diff.transition_type == TransitionType.UNCHANGED + ] + assert len(unchanged) == 1 + assert unchanged[0].current_infra_object == unchanged_table.to_proto() + assert unchanged[0].new_infra_object == unchanged_table.to_proto() + assert len(unchanged[0].infra_object_property_diffs) == 0 + + updates = [ + infra_object_diff + for infra_object_diff in infra_object_diffs + if infra_object_diff.transition_type == TransitionType.UPDATE + ] + assert len(updates) == 1 + assert updates[0].current_infra_object == pre_changed.to_proto() + assert updates[0].new_infra_object == post_changed.to_proto() + assert len(updates[0].infra_object_property_diffs) == 2 + assert updates[0].infra_object_property_diffs[0].property_name == "project_id" + assert updates[0].infra_object_property_diffs[ + 0 + ].val_existing == wrappers.StringValue(value="pre") + assert updates[0].infra_object_property_diffs[ + 0 + ].val_declared == wrappers.StringValue(value="post") + assert updates[0].infra_object_property_diffs[1].property_name == "namespace" + assert updates[0].infra_object_property_diffs[ + 1 + ].val_existing == wrappers.StringValue(value="pre") + assert updates[0].infra_object_property_diffs[ + 1 + ].val_declared == wrappers.StringValue(value="post") diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py new file mode 100644 index 0000000000..483dae73e2 --- /dev/null +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -0,0 +1,86 @@ +from feast.diff.registry_diff import ( + diff_registry_objects, + tag_objects_for_keep_delete_update_add, +) +from feast.feature_view import FeatureView +from tests.utils.data_source_utils import prep_file_source + + +def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + to_delete = FeatureView( + name="to_delete", entities=["id"], batch_source=file_source, ttl=None, + ) + unchanged_fv = FeatureView( + name="fv1", entities=["id"], batch_source=file_source, ttl=None, + ) + pre_changed = FeatureView( + name="fv2", + entities=["id"], + batch_source=file_source, + ttl=None, + tags={"when": "before"}, + ) + post_changed = FeatureView( + name="fv2", + entities=["id"], + batch_source=file_source, + ttl=None, + tags={"when": "after"}, + ) + to_add = FeatureView( + name="to_add", entities=["id"], batch_source=file_source, ttl=None, + ) + + keep, delete, update, add = tag_objects_for_keep_delete_update_add( + [unchanged_fv, pre_changed, to_delete], [unchanged_fv, post_changed, to_add] + ) + + assert len(list(keep)) == 2 + assert unchanged_fv in keep + assert pre_changed in keep + assert post_changed not in keep + 
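The registry variant exercised in this test adds a fourth bucket: names present on both sides contribute their registry version to keep and their newly declared version to update. A sketch under the same illustrative assumptions as above:

    def tag_keep_delete_update_add(existing, desired, key=lambda obj: obj.name):
        existing_by_key = {key(obj): obj for obj in existing}
        desired_by_key = {key(obj): obj for obj in desired}
        # keep carries the existing versions of shared names, update the newly
        # declared versions; a property-level diff between the pairs then
        # detects real changes.
        keep = [obj for k, obj in existing_by_key.items() if k in desired_by_key]
        update = [obj for k, obj in desired_by_key.items() if k in existing_by_key]
        delete = [obj for k, obj in existing_by_key.items() if k not in desired_by_key]
        add = [obj for k, obj in desired_by_key.items() if k not in existing_by_key]
        return keep, delete, update, add

This is consistent with the surrounding assertions: pre_changed lands in keep, post_changed in update, and unchanged_fv in both.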
assert len(list(delete)) == 1 + assert to_delete in delete + assert len(list(update)) == 2 + assert unchanged_fv in update + assert post_changed in update + assert pre_changed not in update + assert len(list(add)) == 1 + assert to_add in add + + +def test_diff_registry_objects_feature_views(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + pre_changed = FeatureView( + name="fv2", + entities=["id"], + batch_source=file_source, + ttl=None, + tags={"when": "before"}, + ) + post_changed = FeatureView( + name="fv2", + entities=["id"], + batch_source=file_source, + ttl=None, + tags={"when": "after"}, + ) + + feast_object_diffs = diff_registry_objects( + pre_changed, pre_changed, "feature view" + ) + assert len(feast_object_diffs.feast_object_property_diffs) == 0 + + feast_object_diffs = diff_registry_objects( + pre_changed, post_changed, "feature view" + ) + assert len(feast_object_diffs.feast_object_property_diffs) == 1 + + assert feast_object_diffs.feast_object_property_diffs[0].property_name == "tags" + assert feast_object_diffs.feast_object_property_diffs[0].val_existing == { + "when": "before" + } + assert feast_object_diffs.feast_object_property_diffs[0].val_declared == { + "when": "after" + } diff --git a/sdk/python/tests/unit/grpc/test_auth.py b/sdk/python/tests/unit/grpc/test_auth.py deleted file mode 100644 index 507c9fa032..0000000000 --- a/sdk/python/tests/unit/grpc/test_auth.py +++ /dev/null @@ -1,199 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2018-2020 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
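Returning to the registry diff tests above: the property-level diffs they assert can be produced generically by comparing field values across two versions of the same object. A minimal sketch, assuming both versions have been flattened into field-name-to-value dicts (the names here are illustrative, not Feast's API):

    from dataclasses import dataclass
    from typing import Any, Dict, List


    @dataclass
    class PropertyDiff:
        property_name: str
        val_existing: Any
        val_declared: Any


    def diff_properties(
        existing: Dict[str, Any], declared: Dict[str, Any]
    ) -> List[PropertyDiff]:
        # Record every field whose value differs between the two versions.
        return [
            PropertyDiff(name, existing[name], declared[name])
            for name in declared
            if name in existing and existing[name] != declared[name]
        ]


    # The feature view example above reduces to a single "tags" diff:
    assert diff_properties(
        {"name": "fv2", "tags": {"when": "before"}},
        {"name": "fv2", "tags": {"when": "after"}},
    ) == [PropertyDiff("tags", {"when": "before"}, {"when": "after"})]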
- -import json -from configparser import NoOptionError -from http import HTTPStatus -from unittest.mock import call, patch - -from google.auth.exceptions import DefaultCredentialsError -from pytest import fixture, raises - -from feast.config import Config -from feast.grpc.auth import ( - GoogleOpenIDAuthMetadataPlugin, - OAuthMetadataPlugin, - get_auth_metadata_plugin, -) - -AUDIENCE = "https://testaudience.io/" - -AUTH_URL = "https://test.auth.com/v2/token" - -HEADERS = {"content-type": "application/json"} - -DATA = json.dumps( - { - "grant_type": "client_credentials", - "client_id": "fakeID", - "client_secret": "fakeSecret", - "audience": AUDIENCE, - } -) - - -class MockResponse: - def __init__(self, json_data, status_code): - self.json_data = json_data - self.status_code = status_code - - def json(self): - return self.json_data - - -class GoogleMockResponse: - def __init__(self, stdout): - self.stdout = stdout - - -class GoogleDefaultResponse: - def __init__(self, id_token): - self.id_token = id_token - - def refresh(self, request): - pass - - -class GoogleDefaultErrorResponse: - def __init__(self, id_token): - self.wrong_attribute = id_token - - def refresh(self, request): - pass - - -@fixture -def config_oauth(): - config_dict = { - "core_url": "localhost:50051", - "enable_auth": True, - "auth_provider": "oauth", - "oauth_grant_type": "client_credentials", - "oauth_client_id": "fakeID", - "oauth_client_secret": "fakeSecret", - "oauth_audience": AUDIENCE, - "oauth_token_request_url": AUTH_URL, - } - return Config(config_dict) - - -@fixture -def config_google(): - config_dict = { - "core_url": "localhost:50051", - "enable_auth": True, - "auth_provider": "google", - } - return Config(config_dict) - - -@fixture -def config_with_missing_variable(): - config_dict = { - "core_url": "localhost:50051", - "enable_auth": True, - "auth_provider": "oauth", - "oauth_grant_type": "client_credentials", - "oauth_client_id": "fakeID", - "oauth_client_secret": "fakeSecret", - "oauth_token_request_url": AUTH_URL, - } - return Config(config_dict) - - -@patch( - "requests.post", - return_value=MockResponse({"access_token": "mock_token"}, HTTPStatus.OK), -) -def test_get_auth_metadata_plugin_oauth_should_pass(post, config_oauth): - auth_metadata_plugin = get_auth_metadata_plugin(config_oauth) - assert isinstance(auth_metadata_plugin, OAuthMetadataPlugin) - assert post.call_count == 1 - assert post.call_args == call(AUTH_URL, headers=HEADERS, data=DATA) - assert auth_metadata_plugin.get_signed_meta() == ( - ("authorization", "Bearer mock_token"), - ) - - -@patch( - "requests.post", - return_value=MockResponse({"access_token": "mock_token"}, HTTPStatus.UNAUTHORIZED), -) -def test_get_auth_metadata_plugin_oauth_should_raise_when_response_is_not_200( - post, config_oauth -): - with raises(RuntimeError): - get_auth_metadata_plugin(config_oauth) - assert post.call_count == 1 - assert post.call_args == call(AUTH_URL, headers=HEADERS, data=DATA) - - -def test_get_auth_metadata_plugin_oauth_should_raise_when_config_is_incorrect( - config_with_missing_variable, -): - with raises((RuntimeError, NoOptionError)): - get_auth_metadata_plugin(config_with_missing_variable) - - -@patch( - "google.oauth2.id_token.verify_token", - return_value={"iss": "accounts.google.com", "exp": 12341234}, -) -@patch("google.oauth2.id_token.fetch_id_token", return_value="Some Token") -def test_get_auth_metadata_plugin_google_should_pass_with_token_from_gcloud_sdk( - verify_token, fetch_id_token, config_google -): - auth_metadata_plugin = 
get_auth_metadata_plugin(config_google) - assert isinstance(auth_metadata_plugin, GoogleOpenIDAuthMetadataPlugin) - assert auth_metadata_plugin.get_signed_meta() == ( - ("authorization", "Bearer Some Token"), - ) - - -@patch( - "google.oauth2.id_token.verify_token", - return_value={"iss": "accounts.google.com", "exp": 12341234}, -) -@patch( - "google.auth.default", - return_value=[ - GoogleDefaultResponse("fake_token"), - GoogleDefaultResponse("project_id"), - ], -) -@patch("google.oauth2.id_token.fetch_id_token", side_effect=DefaultCredentialsError()) -def test_get_auth_metadata_plugin_google_should_pass_with_token_from_google_auth_lib( - verify_token, fetch_id_token, default, config_google -): - auth_metadata_plugin = get_auth_metadata_plugin(config_google) - assert isinstance(auth_metadata_plugin, GoogleOpenIDAuthMetadataPlugin) - assert auth_metadata_plugin.get_signed_meta() == ( - ("authorization", "Bearer fake_token"), - ) - - -@patch( - "google.auth.default", - return_value=[ - GoogleDefaultErrorResponse("fake_token"), - GoogleDefaultErrorResponse("project_id"), - ], -) -@patch("google.oauth2.id_token.fetch_id_token", side_effect=DefaultCredentialsError()) -def test_get_auth_metadata_plugin_google_should_raise_when_token_validation_fails( - fetch_id_token, default, config_google -): - with raises(RuntimeError): - get_auth_metadata_plugin(config_google) diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py new file mode 100644 index 0000000000..e1be890e57 --- /dev/null +++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py @@ -0,0 +1,230 @@ +from copy import deepcopy +from dataclasses import dataclass + +import boto3 +import pytest +from moto import mock_dynamodb2 + +from feast.infra.offline_stores.file import FileOfflineStoreConfig +from feast.infra.online_stores.dynamodb import ( + DynamoDBOnlineStore, + DynamoDBOnlineStoreConfig, + DynamoDBTable, +) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import RepoConfig +from tests.utils.online_store_utils import ( + _create_n_customer_test_samples, + _create_test_table, + _insert_data_test_table, +) + +REGISTRY = "s3://test_registry/registry.db" +PROJECT = "test_aws" +PROVIDER = "aws" +TABLE_NAME = "dynamodb_online_store" +REGION = "us-west-2" + + +@dataclass +class MockFeatureView: + name: str + + +@pytest.fixture +def repo_config(): + return RepoConfig( + registry=REGISTRY, + project=PROJECT, + provider=PROVIDER, + online_store=DynamoDBOnlineStoreConfig(region=REGION), + offline_store=FileOfflineStoreConfig(), + ) + + +def test_online_store_config_default(): + """Test DynamoDBOnlineStoreConfig default parameters.""" + aws_region = "us-west-2" + dynamodb_store_config = DynamoDBOnlineStoreConfig(region=aws_region) + assert dynamodb_store_config.type == "dynamodb" + assert dynamodb_store_config.batch_size == 40 + assert dynamodb_store_config.endpoint_url is None + assert dynamodb_store_config.region == aws_region + assert dynamodb_store_config.table_name_template == "{project}.{table_name}" + + +def test_dynamodb_table_default_params(): + """Test DynamoDBTable default parameters.""" + tbl_name = "dynamodb-test" + aws_region = "us-west-2" + dynamodb_table = DynamoDBTable(tbl_name, aws_region) + assert dynamodb_table.name == tbl_name + assert dynamodb_table.region == aws_region + assert 
dynamodb_table.endpoint_url is None + assert dynamodb_table._dynamodb_client is None + assert dynamodb_table._dynamodb_resource is None + + +def test_online_store_config_custom_params(): + """Test DynamoDBOnlineStoreConfig custom parameters.""" + aws_region = "us-west-2" + batch_size = 20 + endpoint_url = "http://localhost:8000" + table_name_template = "feast_test.dynamodb_table" + dynamodb_store_config = DynamoDBOnlineStoreConfig( + region=aws_region, + batch_size=batch_size, + endpoint_url=endpoint_url, + table_name_template=table_name_template, + ) + assert dynamodb_store_config.type == "dynamodb" + assert dynamodb_store_config.batch_size == batch_size + assert dynamodb_store_config.endpoint_url == endpoint_url + assert dynamodb_store_config.region == aws_region + assert dynamodb_store_config.table_name_template == table_name_template + + +def test_dynamodb_table_custom_params(): + """Test DynamoDBTable custom parameters.""" + tbl_name = "dynamodb-test" + aws_region = "us-west-2" + endpoint_url = "http://localhost:8000" + dynamodb_table = DynamoDBTable(tbl_name, aws_region, endpoint_url) + assert dynamodb_table.name == tbl_name + assert dynamodb_table.region == aws_region + assert dynamodb_table.endpoint_url == endpoint_url + assert dynamodb_table._dynamodb_client is None + assert dynamodb_table._dynamodb_resource is None + + +def test_online_store_config_dynamodb_client(): + """Test DynamoDBOnlineStoreConfig configure DynamoDB client with endpoint_url.""" + aws_region = "us-west-2" + endpoint_url = "http://localhost:8000" + dynamodb_store = DynamoDBOnlineStore() + dynamodb_store_config = DynamoDBOnlineStoreConfig( + region=aws_region, endpoint_url=endpoint_url + ) + dynamodb_client = dynamodb_store._get_dynamodb_client( + dynamodb_store_config.region, dynamodb_store_config.endpoint_url + ) + assert dynamodb_client.meta.region_name == aws_region + assert dynamodb_client.meta.endpoint_url == endpoint_url + + +def test_dynamodb_table_dynamodb_client(): + """Test DynamoDBTable configure DynamoDB client with endpoint_url.""" + tbl_name = "dynamodb-test" + aws_region = "us-west-2" + endpoint_url = "http://localhost:8000" + dynamodb_table = DynamoDBTable(tbl_name, aws_region, endpoint_url) + dynamodb_client = dynamodb_table._get_dynamodb_client( + dynamodb_table.region, dynamodb_table.endpoint_url + ) + assert dynamodb_client.meta.region_name == aws_region + assert dynamodb_client.meta.endpoint_url == endpoint_url + + +def test_online_store_config_dynamodb_resource(): + """Test DynamoDBOnlineStoreConfig configure DynamoDB Resource with endpoint_url.""" + aws_region = "us-west-2" + endpoint_url = "http://localhost:8000" + dynamodb_store = DynamoDBOnlineStore() + dynamodb_store_config = DynamoDBOnlineStoreConfig( + region=aws_region, endpoint_url=endpoint_url + ) + dynamodb_resource = dynamodb_store._get_dynamodb_resource( + dynamodb_store_config.region, dynamodb_store_config.endpoint_url + ) + assert dynamodb_resource.meta.client.meta.region_name == aws_region + assert dynamodb_resource.meta.client.meta.endpoint_url == endpoint_url + + +def test_dynamodb_table_dynamodb_resource(): + """Test DynamoDBTable configure DynamoDB resource with endpoint_url.""" + tbl_name = "dynamodb-test" + aws_region = "us-west-2" + endpoint_url = "http://localhost:8000" + dynamodb_table = DynamoDBTable(tbl_name, aws_region, endpoint_url) + dynamodb_resource = dynamodb_table._get_dynamodb_resource( + dynamodb_table.region, dynamodb_table.endpoint_url + ) + assert dynamodb_resource.meta.client.meta.region_name == 
aws_region + assert dynamodb_resource.meta.client.meta.endpoint_url == endpoint_url + + +@mock_dynamodb2 +@pytest.mark.parametrize("n_samples", [5, 50, 100]) +def test_online_read(repo_config, n_samples): + """Test DynamoDBOnlineStore online_read method.""" + _create_test_table(PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + data = _create_n_customer_test_samples(n=n_samples) + _insert_data_test_table(data, PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + + entity_keys, features, *rest = zip(*data) + dynamodb_store = DynamoDBOnlineStore() + returned_items = dynamodb_store.online_read( + config=repo_config, + table=MockFeatureView(name=f"{TABLE_NAME}_{n_samples}"), + entity_keys=entity_keys, + ) + assert len(returned_items) == len(data) + assert [item[1] for item in returned_items] == list(features) + + +@mock_dynamodb2 +def test_online_read_unknown_entity(repo_config): + """Test DynamoDBOnlineStore online_read method with an unknown entity.""" + n_samples = 2 + _create_test_table(PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + data = _create_n_customer_test_samples(n=n_samples) + _insert_data_test_table(data, PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + + entity_keys, features, *rest = zip(*data) + # Convert to lists so that a nonsensical entity can be inserted below + entity_keys = list(entity_keys) + features = list(features) + dynamodb_store = DynamoDBOnlineStore() + + # Have the unknown entity be in the beginning, middle, and end of the list of entities. + for pos in range(len(entity_keys)): + entity_keys_with_unknown = deepcopy(entity_keys) + entity_keys_with_unknown.insert( + pos, + EntityKeyProto( + join_keys=["customer"], entity_values=[ValueProto(string_val="12359")] + ), + ) + features_with_none = deepcopy(features) + features_with_none.insert(pos, None) + returned_items = dynamodb_store.online_read( + config=repo_config, + table=MockFeatureView(name=f"{TABLE_NAME}_{n_samples}"), + entity_keys=entity_keys_with_unknown, + ) + assert len(returned_items) == len(entity_keys_with_unknown) + assert [item[1] for item in returned_items] == list(features_with_none) + # The order should match the original entity key order + assert returned_items[pos] == (None, None) + + +@mock_dynamodb2 +def test_write_batch_non_duplicates(repo_config): + """Test that DynamoDBOnlineStore deduplicates write batch request items.""" + dynamodb_tbl = f"{TABLE_NAME}_batch_non_duplicates" + _create_test_table(PROJECT, dynamodb_tbl, REGION) + data = _create_n_customer_test_samples() + data_duplicate = deepcopy(data) + dynamodb_resource = boto3.resource("dynamodb", region_name=REGION) + table_instance = dynamodb_resource.Table(f"{PROJECT}.{dynamodb_tbl}") + dynamodb_store = DynamoDBOnlineStore() + # Insert duplicate data + dynamodb_store._write_batch_non_duplicates( + table_instance, data + data_duplicate, progress=None + ) + # Request more items than inserted + response = table_instance.scan(Limit=20) + returned_items = response.get("Items", None) + assert returned_items is not None + assert len(returned_items) == len(data) diff --git a/sdk/python/tests/unit/infra/test_provider.py b/sdk/python/tests/unit/infra/test_provider.py new file mode 100644 index 0000000000..43c09760e9 --- /dev/null +++ b/sdk/python/tests/unit/infra/test_provider.py @@ -0,0 +1,48 @@ +# Copyright 2020 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import timedelta + +from feast import BigQuerySource +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.provider import _get_column_names +from feast.types import String +from feast.value_type import ValueType + + +def test_get_column_names_preserves_feature_ordering(): + entity = Entity("my-entity", description="My entity", value_type=ValueType.STRING) + fv = FeatureView( + name="my-fv", + entities=["my-entity"], + ttl=timedelta(days=1), + batch_source=BigQuerySource(table="non-existent-mock"), + schema=[ + Field(name="a", dtype=String), + Field(name="b", dtype=String), + Field(name="c", dtype=String), + Field(name="d", dtype=String), + Field(name="e", dtype=String), + Field(name="f", dtype=String), + Field(name="g", dtype=String), + Field(name="h", dtype=String), + Field(name="i", dtype=String), + Field(name="j", dtype=String), + ], + ) + + _, feature_list, _, _ = _get_column_names(fv, [entity]) + assert feature_list == ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"] diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py new file mode 100644 index 0000000000..7f288d36db --- /dev/null +++ b/sdk/python/tests/unit/test_data_sources.py @@ -0,0 +1,255 @@ +import pytest + +from feast import ValueType +from feast.data_format import ProtoFormat +from feast.data_source import ( + DataSource, + KafkaSource, + KinesisSource, + PushSource, + RequestDataSource, + RequestSource, +) +from feast.field import Field +from feast.infra.offline_stores.bigquery_source import BigQuerySource +from feast.infra.offline_stores.file_source import FileSource +from feast.infra.offline_stores.redshift_source import RedshiftSource +from feast.infra.offline_stores.snowflake_source import SnowflakeSource +from feast.types import Bool, Float32, Int64 + + +def test_push_with_batch(): + push_source = PushSource( + name="test", batch_source=BigQuerySource(table="test.test"), + ) + push_source_proto = push_source.to_proto() + assert push_source_proto.HasField("batch_source") + + push_source_unproto = PushSource.from_proto(push_source_proto) + + assert push_source.name == push_source_unproto.name + assert push_source.batch_source.name == push_source_unproto.batch_source.name + + +def test_request_data_source_deprecation(): + with pytest.warns(DeprecationWarning): + request_data_source = RequestDataSource( + name="vals_to_add", + schema={"val_to_add": ValueType.INT64, "val_to_add_2": ValueType.INT64}, + ) + request_data_source_proto = request_data_source.to_proto() + returned_request_source = RequestSource.from_proto(request_data_source_proto) + assert returned_request_source == request_data_source + + +def test_request_source_primitive_type_to_proto(): + schema = [ + Field(name="f1", dtype=Float32), + Field(name="f2", dtype=Bool), + ] + request_source = RequestSource( + name="source", schema=schema, description="desc", tags={}, owner="feast", + ) + request_proto = request_source.to_proto() + deserialized_request_source = RequestSource.from_proto(request_proto) + assert 
deserialized_request_source == request_source + + +def test_hash(): + push_source_1 = PushSource( + name="test", batch_source=BigQuerySource(table="test.test"), + ) + push_source_2 = PushSource( + name="test", batch_source=BigQuerySource(table="test.test"), + ) + push_source_3 = PushSource( + name="test", batch_source=BigQuerySource(table="test.test2"), + ) + push_source_4 = PushSource( + name="test", + batch_source=BigQuerySource(table="test.test2"), + description="test", + ) + + s1 = {push_source_1, push_source_2} + assert len(s1) == 1 + + s2 = {push_source_1, push_source_3} + assert len(s2) == 2 + + s3 = {push_source_3, push_source_4} + assert len(s3) == 2 + + s4 = {push_source_1, push_source_2, push_source_3, push_source_4} + assert len(s4) == 3 + + +# TODO(kevjumba): Remove this test in feast 0.23 when positional arguments are removed. +def test_default_data_source_kw_arg_warning(): + # source_class = request.param + with pytest.warns(DeprecationWarning): + source = KafkaSource( + "name", "column", "bootstrap_servers", ProtoFormat("class_path"), "topic" + ) + assert source.name == "name" + assert source.timestamp_field == "column" + assert source.kafka_options.bootstrap_servers == "bootstrap_servers" + assert source.kafka_options.topic == "topic" + with pytest.raises(ValueError): + KafkaSource("name", "column", "bootstrap_servers", topic="topic") + + with pytest.warns(DeprecationWarning): + source = KinesisSource( + "name", + "column", + "c_column", + ProtoFormat("class_path"), + "region", + "stream_name", + ) + assert source.name == "name" + assert source.timestamp_field == "column" + assert source.created_timestamp_column == "c_column" + assert source.kinesis_options.region == "region" + assert source.kinesis_options.stream_name == "stream_name" + + with pytest.raises(ValueError): + KinesisSource( + "name", "column", "c_column", region="region", stream_name="stream_name" + ) + + with pytest.warns(DeprecationWarning): + source = RequestSource( + "name", [Field(name="val_to_add", dtype=Int64)], description="description" + ) + assert source.name == "name" + assert source.description == "description" + + with pytest.raises(ValueError): + RequestSource("name") + + with pytest.warns(DeprecationWarning): + source = PushSource( + "name", + BigQuerySource(name="bigquery_source", table="table"), + description="description", + ) + assert source.name == "name" + assert source.description == "description" + assert source.batch_source.name == "bigquery_source" + + with pytest.raises(ValueError): + PushSource("name") + + # No name warning for DataSource + with pytest.warns(UserWarning): + source = KafkaSource( + timestamp_field="column", + bootstrap_servers="bootstrap_servers", + message_format=ProtoFormat("class_path"), + topic="topic", + ) + + +def test_proto_conversion(): + bigquery_source = BigQuerySource( + name="test_source", + table="test_table", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + file_source = FileSource( + name="test_source", + path="test_path", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + redshift_source = RedshiftSource( + name="test_source", + database="test_database", + schema="test_schema", + table="test_table", + timestamp_field="event_timestamp", + 
created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + snowflake_source = SnowflakeSource( + name="test_source", + database="test_database", + warehouse="test_warehouse", + schema="test_schema", + table="test_table", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + kafka_source = KafkaSource( + name="test_source", + bootstrap_servers="test_servers", + message_format=ProtoFormat("class_path"), + topic="test_topic", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + batch_source=file_source, + ) + + kinesis_source = KinesisSource( + name="test_source", + region="test_region", + record_format=ProtoFormat("class_path"), + stream_name="test_stream", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + batch_source=file_source, + ) + + push_source = PushSource( + name="test_source", + batch_source=file_source, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + request_source = RequestSource( + name="test_source", + schema=[Field(name="test1", dtype=Float32), Field(name="test2", dtype=Int64)], + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + assert DataSource.from_proto(bigquery_source.to_proto()) == bigquery_source + assert DataSource.from_proto(file_source.to_proto()) == file_source + assert DataSource.from_proto(redshift_source.to_proto()) == redshift_source + assert DataSource.from_proto(snowflake_source.to_proto()) == snowflake_source + assert DataSource.from_proto(kafka_source.to_proto()) == kafka_source + assert DataSource.from_proto(kinesis_source.to_proto()) == kinesis_source + assert DataSource.from_proto(push_source.to_proto()) == push_source + assert DataSource.from_proto(request_source.to_proto()) == request_source diff --git a/sdk/python/tests/unit/test_entity.py b/sdk/python/tests/unit/test_entity.py index b8381451fd..254a975f67 100644 --- a/sdk/python/tests/unit/test_entity.py +++ b/sdk/python/tests/unit/test_entity.py @@ -11,32 +11,74 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
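Every assertion at the end of test_proto_conversion above follows the same serialize-then-deserialize round trip; any field that to_proto() or from_proto() silently drops breaks the equality check. If that list keeps growing, it could be folded into a small helper along these lines (a sketch, relying only on the to_proto()/from_proto() pair and the value-based equality already exercised above):

    def assert_proto_round_trip(source, from_proto):
        # Serialize to the proto representation and reconstruct; the restored
        # object must compare equal, or the conversion lost information.
        restored = from_proto(source.to_proto())
        assert restored == source, f"proto round trip lost information: {source!r}"

    # e.g. assert_proto_round_trip(file_source, DataSource.from_proto)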
+import assertpy +import pytest from feast.entity import Entity from feast.value_type import ValueType def test_join_key_default(): - entity = Entity("my-entity", description="My entity", value_type=ValueType.STRING) + with pytest.deprecated_call(): + entity = Entity( + "my-entity", description="My entity", value_type=ValueType.STRING + ) assert entity.join_key == "my-entity" -def test_entity_class_contains_labels(): - entity = Entity( - "my-entity", - description="My entity", - value_type=ValueType.STRING, - labels={"key1": "val1", "key2": "val2"}, - ) - assert "key1" in entity.labels.keys() and entity.labels["key1"] == "val1" - assert "key2" in entity.labels.keys() and entity.labels["key2"] == "val2" +def test_entity_class_contains_tags(): + with pytest.deprecated_call(): + entity = Entity( + "my-entity", + description="My entity", + value_type=ValueType.STRING, + tags={"key1": "val1", "key2": "val2"}, + ) + assert "key1" in entity.tags.keys() and entity.tags["key1"] == "val1" + assert "key2" in entity.tags.keys() and entity.tags["key2"] == "val2" -def test_entity_without_labels_empty_dict(): - entity = Entity("my-entity", description="My entity", value_type=ValueType.STRING) - assert entity.labels == dict() - assert len(entity.labels) == 0 +def test_entity_without_tags_empty_dict(): + with pytest.deprecated_call(): + entity = Entity( + "my-entity", description="My entity", value_type=ValueType.STRING + ) + assert entity.tags == dict() + assert len(entity.tags) == 0 def test_entity_without_description(): - Entity("my-entity", value_type=ValueType.STRING) + with pytest.deprecated_call(): + Entity("my-entity", value_type=ValueType.STRING) + + +def test_name_not_specified(): + assertpy.assert_that(lambda: Entity(value_type=ValueType.STRING)).raises(ValueError) + + +def test_multiple_args(): + assertpy.assert_that(lambda: Entity("a", ValueType.STRING)).raises(ValueError) + + +def test_name_keyword(recwarn): + Entity(name="my-entity", value_type=ValueType.STRING) + assert len(recwarn) == 0 + + +def test_hash(): + entity1 = Entity(name="my-entity", value_type=ValueType.STRING) + entity2 = Entity(name="my-entity", value_type=ValueType.STRING) + entity3 = Entity(name="my-entity", value_type=ValueType.FLOAT) + entity4 = Entity(name="my-entity", value_type=ValueType.FLOAT, description="test") + + s1 = {entity1, entity2} + assert len(s1) == 1 + + s2 = {entity1, entity3} + assert len(s2) == 2 + + s3 = {entity3, entity4} + assert len(s3) == 2 + + s4 = {entity1, entity2, entity3, entity4} + assert len(s4) == 3 diff --git a/sdk/python/tests/unit/test_feature_service.py b/sdk/python/tests/unit/test_feature_service.py new file mode 100644 index 0000000000..fc4fd70bcb --- /dev/null +++ b/sdk/python/tests/unit/test_feature_service.py @@ -0,0 +1,105 @@ +import pytest + +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.types import Float32 + + +def test_feature_service_with_description(): + feature_service = FeatureService( + name="my-feature-service", features=[], description="a clear description" + ) + assert feature_service.to_proto().spec.description == "a clear description" + + +def test_feature_service_without_description(): + feature_service = FeatureService(name="my-feature-service", features=[]) + # + assert feature_service.to_proto().spec.description == "" + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + 
feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_service_1 = FeatureService( + name="my-feature-service", features=[feature_view[["feature1", "feature2"]]] + ) + feature_service_2 = FeatureService( + name="my-feature-service", features=[feature_view[["feature1", "feature2"]]] + ) + feature_service_3 = FeatureService( + name="my-feature-service", features=[feature_view[["feature1"]]] + ) + feature_service_4 = FeatureService( + name="my-feature-service", + features=[feature_view[["feature1"]]], + description="test", + ) + + s1 = {feature_service_1, feature_service_2} + assert len(s1) == 1 + + s2 = {feature_service_1, feature_service_3} + assert len(s2) == 2 + + s3 = {feature_service_3, feature_service_4} + assert len(s3) == 2 + + s4 = {feature_service_1, feature_service_2, feature_service_3, feature_service_4} + assert len(s4) == 3 + + +def test_feature_view_kw_args_warning(): + with pytest.warns(DeprecationWarning): + service = FeatureService("name", [], tags={"tag_1": "tag"}, description="desc") + assert service.name == "name" + assert service.tags == {"tag_1": "tag"} + assert service.description == "desc" + + # More positional args than name and features + with pytest.raises(ValueError): + service = FeatureService("name", [], {"tag_1": "tag"}, "desc") + + # No name defined. + with pytest.raises(ValueError): + service = FeatureService(features=[], tags={"tag_1": "tag"}, description="desc") + + +def no_warnings(func): + def wrapper_no_warnings(*args, **kwargs): + with pytest.warns(None) as warnings: + func(*args, **kwargs) + + if len(warnings) > 0: + raise AssertionError( + "Warnings were raised: " + ", ".join([str(w) for w in warnings]) + ) + + return wrapper_no_warnings + + +@no_warnings +def test_feature_view_kw_args_normal(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + _ = FeatureService( + name="my-feature-service", features=[feature_view[["feature1", "feature2"]]] + ) diff --git a/sdk/python/tests/unit/test_feature_validation.py b/sdk/python/tests/unit/test_feature_validation.py new file mode 100644 index 0000000000..b349eb8ea0 --- /dev/null +++ b/sdk/python/tests/unit/test_feature_validation.py @@ -0,0 +1,52 @@ +import pytest + +from feast.errors import FeatureNameCollisionError +from feast.feature_store import _validate_feature_refs + + +def test_feature_name_collision_on_historical_retrieval(): + # _validate_feature_refs is the function that checks for colliding feature names + # check when feature names collide and 'full_feature_names=False' + with pytest.raises(FeatureNameCollisionError) as error: + _validate_feature_refs( + feature_refs=[ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "customer_profile:avg_daily_trips", + ], + full_feature_names=False, + ) + + expected_error_message = ( + "Duplicate features named avg_daily_trips found.\n" + "To resolve this collision, either use the full feature name by setting " + "'full_feature_names=True', or ensure that the features in question have different names." 
+ ) + + assert str(error.value) == expected_error_message + + # check when feature names collide and 'full_feature_names=True' + with pytest.raises(FeatureNameCollisionError) as error: + _validate_feature_refs( + feature_refs=[ + "driver_stats:conv_rate", + "driver_stats:avg_daily_trips", + "driver_stats:avg_daily_trips", + "customer_profile:current_balance", + "customer_profile:avg_passenger_count", + "customer_profile:lifetime_trip_count", + "customer_profile:avg_daily_trips", + ], + full_feature_names=True, + ) + + expected_error_message = ( + "Duplicate features named driver_stats__avg_daily_trips found.\n" + "To resolve this collision, please ensure that the feature views or their own features " + "have different names. If you're intentionally joining the same feature view twice on " + "different sets of entities, please rename one of the feature views with '.with_name'." + ) + assert str(error.value) == expected_error_message diff --git a/sdk/python/tests/unit/test_feature_view.py b/sdk/python/tests/unit/test_feature_view.py new file mode 100644 index 0000000000..80a583806e --- /dev/null +++ b/sdk/python/tests/unit/test_feature_view.py @@ -0,0 +1,64 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.types import Float32 + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view_1 = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_view_2 = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_view_3 = FeatureView( + name="my-feature-view", + entities=[], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + feature_view_4 = FeatureView( + name="my-feature-view", + entities=[], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + description="test", + ) + + s1 = {feature_view_1, feature_view_2} + assert len(s1) == 1 + + s2 = {feature_view_1, feature_view_3} + assert len(s2) == 2 + + s3 = {feature_view_3, feature_view_4} + assert len(s3) == 2 + + s4 = {feature_view_1, feature_view_2, feature_view_3, feature_view_4} + assert len(s4) == 3 diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py new file mode 100644 index 0000000000..9d45cfbb0b --- /dev/null +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -0,0 +1,102 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import pandas as pd + +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.types import Float32 + + +def udf1(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["output1"] = features_df["feature1"] + df["output2"] = features_df["feature2"] + return df + + +def udf2(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["output1"] = features_df["feature1"] + 100 + df["output2"] = features_df["feature2"] + 100 + return df + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + sources = {"my-feature-view": feature_view} + on_demand_feature_view_1 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf1, + ) + on_demand_feature_view_2 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf1, + ) + on_demand_feature_view_3 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf2, + ) + on_demand_feature_view_4 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf2, + description="test", + ) + + s1 = {on_demand_feature_view_1, on_demand_feature_view_2} + assert len(s1) == 1 + + s2 = {on_demand_feature_view_1, on_demand_feature_view_3} + assert len(s2) == 2 + + s3 = {on_demand_feature_view_3, on_demand_feature_view_4} + assert len(s3) == 2 + + s4 = { + on_demand_feature_view_1, + on_demand_feature_view_2, + on_demand_feature_view_3, + on_demand_feature_view_4, + } + assert len(s4) == 3 diff --git a/sdk/python/tests/unit/test_proto_json.py b/sdk/python/tests/unit/test_proto_json.py new file mode 100644 index 0000000000..6bfdbbbf91 --- /dev/null +++ b/sdk/python/tests/unit/test_proto_json.py @@ -0,0 +1,108 @@ +import assertpy +import pytest +from google.protobuf.json_format import MessageToDict, Parse + +from feast import proto_json +from feast.protos.feast.serving.ServingService_pb2 import ( + FeatureList, + GetOnlineFeaturesResponse, +) +from feast.protos.feast.types.Value_pb2 import RepeatedValue + +FeatureVector = GetOnlineFeaturesResponse.FeatureVector + + +@pytest.fixture(scope="module") +def proto_json_patch(): + proto_json.patch() + + +def test_feature_vector_values(proto_json_patch): + # FeatureVector contains "repeated values" proto field. 
+ # We want to test that feast.types.Value can take different types in JSON + # without using additional structure (e.g. 1 instead of {int64_val: 1}). + feature_vector_str = """{ + "values": [ + 1, + 2.0, + true, + "foo", + [1, 2, 3], + [2.0, 3.0, 4.0, null], + [true, false, true], + ["foo", "bar", "foobar"] + ] + }""" + feature_vector_proto = FeatureVector() + Parse(feature_vector_str, feature_vector_proto) + assertpy.assert_that(len(feature_vector_proto.values)).is_equal_to(8) + assertpy.assert_that(feature_vector_proto.values[0].int64_val).is_equal_to(1) + assertpy.assert_that(feature_vector_proto.values[1].double_val).is_equal_to(2.0) + assertpy.assert_that(feature_vector_proto.values[2].bool_val).is_equal_to(True) + assertpy.assert_that(feature_vector_proto.values[3].string_val).is_equal_to("foo") + assertpy.assert_that(feature_vector_proto.values[4].int64_list_val.val).is_equal_to( + [1, 2, 3] + ) + # Can't directly check equality to [2.0, 3.0, 4.0, float("nan")], because float("nan") != float("nan") + assertpy.assert_that( + feature_vector_proto.values[5].double_list_val.val[:3] + ).is_equal_to([2.0, 3.0, 4.0]) + assertpy.assert_that(feature_vector_proto.values[5].double_list_val.val[3]).is_nan() + assertpy.assert_that(feature_vector_proto.values[6].bool_list_val.val).is_equal_to( + [True, False, True] + ) + assertpy.assert_that( + feature_vector_proto.values[7].string_list_val.val + ).is_equal_to(["foo", "bar", "foobar"]) + + # Now convert protobuf back to json and check that + feature_vector_json = MessageToDict(feature_vector_proto) + assertpy.assert_that(len(feature_vector_json["values"])).is_equal_to(8) + assertpy.assert_that(feature_vector_json["values"][0]).is_equal_to(1) + assertpy.assert_that(feature_vector_json["values"][1]).is_equal_to(2.0) + assertpy.assert_that(feature_vector_json["values"][2]).is_equal_to(True) + assertpy.assert_that(feature_vector_json["values"][3]).is_equal_to("foo") + assertpy.assert_that(feature_vector_json["values"][4]).is_equal_to([1, 2, 3]) + # Can't directly check equality to [2.0, 3.0, 4.0, float("nan")], because float("nan") != float("nan") + assertpy.assert_that(feature_vector_json["values"][5][:3]).is_equal_to( + [2.0, 3.0, 4.0] + ) + assertpy.assert_that(feature_vector_json["values"][5][3]).is_nan() + assertpy.assert_that(feature_vector_json["values"][6]).is_equal_to( + [True, False, True] + ) + assertpy.assert_that(feature_vector_json["values"][7]).is_equal_to( + ["foo", "bar", "foobar"] + ) + + +def test_feast_repeated_value(proto_json_patch): + # Make sure that RepeatedValue in JSON does not need the + # additional structure (e.g. [1,2,3] instead of {"val": [1,2,3]}) + repeated_value_str = "[1,2,3]" + repeated_value_proto = RepeatedValue() + Parse(repeated_value_str, repeated_value_proto) + assertpy.assert_that(len(repeated_value_proto.val)).is_equal_to(3) + assertpy.assert_that(repeated_value_proto.val[0].int64_val).is_equal_to(1) + assertpy.assert_that(repeated_value_proto.val[1].int64_val).is_equal_to(2) + assertpy.assert_that(repeated_value_proto.val[2].int64_val).is_equal_to(3) + # Now convert protobuf back to json and check that + repeated_value_json = MessageToDict(repeated_value_proto) + assertpy.assert_that(repeated_value_json).is_equal_to([1, 2, 3]) + + +def test_feature_list(proto_json_patch): + # Make sure that FeatureList in JSON does not need the additional structure + # (e.g. 
["foo", "bar"] instead of {"val": ["foo", "bar"]}) + feature_list_str = '["feature-a", "feature-b", "feature-c"]' + feature_list_proto = FeatureList() + Parse(feature_list_str, feature_list_proto) + assertpy.assert_that(len(feature_list_proto.val)).is_equal_to(3) + assertpy.assert_that(feature_list_proto.val[0]).is_equal_to("feature-a") + assertpy.assert_that(feature_list_proto.val[1]).is_equal_to("feature-b") + assertpy.assert_that(feature_list_proto.val[2]).is_equal_to("feature-c") + # Now convert protobuf back to json and check that + feature_list_json = MessageToDict(feature_list_proto) + assertpy.assert_that(feature_list_json).is_equal_to( + ["feature-a", "feature-b", "feature-c"] + ) diff --git a/sdk/python/tests/unit/test_types.py b/sdk/python/tests/unit/test_types.py new file mode 100644 index 0000000000..af490b4f3a --- /dev/null +++ b/sdk/python/tests/unit/test_types.py @@ -0,0 +1,34 @@ +import pytest + +from feast.types import Array, Float32, String, from_value_type +from feast.value_type import ValueType + + +def test_primitive_feast_type(): + assert String.to_value_type() == ValueType.STRING + assert from_value_type(String.to_value_type()) == String + assert Float32.to_value_type() == ValueType.FLOAT + assert from_value_type(Float32.to_value_type()) == Float32 + + +def test_array_feast_type(): + array_string = Array(String) + assert array_string.to_value_type() == ValueType.STRING_LIST + assert from_value_type(array_string.to_value_type()) == array_string + + array_float_32 = Array(Float32) + assert array_float_32.to_value_type() == ValueType.FLOAT_LIST + assert from_value_type(array_float_32.to_value_type()) == array_float_32 + + with pytest.raises(ValueError): + _ = Array(Array) + + with pytest.raises(ValueError): + _ = Array(Array(String)) + + +def test_all_value_types(): + for value in ValueType: + # We do not support the NULL type. 
+ if value != ValueType.NULL: + assert from_value_type(value).to_value_type() == value diff --git a/sdk/python/tests/unit/test_unit_feature_store.py b/sdk/python/tests/unit/test_unit_feature_store.py new file mode 100644 index 0000000000..6f9dd6acb0 --- /dev/null +++ b/sdk/python/tests/unit/test_unit_feature_store.py @@ -0,0 +1,50 @@ +from dataclasses import dataclass +from typing import Dict, List + +from feast import FeatureStore +from feast.protos.feast.types.Value_pb2 import Value + + +@dataclass +class MockFeatureViewProjection: + join_key_map: Dict[str, str] + + +@dataclass +class MockFeatureView: + name: str + entities: List[str] + projection: MockFeatureViewProjection + + +def test__get_unique_entities(): + entity_values = { + "entity_1": [Value(int64_val=1), Value(int64_val=2), Value(int64_val=1)], + "entity_2": [ + Value(string_val="1"), + Value(string_val="2"), + Value(string_val="1"), + ], + "entity_3": [Value(int64_val=8), Value(int64_val=9), Value(int64_val=10)], + } + + entity_name_to_join_key_map = {"entity_1": "entity_1", "entity_2": "entity_2"} + + fv = MockFeatureView( + name="fv_1", + entities=["entity_1", "entity_2"], + projection=MockFeatureViewProjection(join_key_map={}), + ) + + unique_entities, indexes = FeatureStore._get_unique_entities( + FeatureStore, + table=fv, + join_key_values=entity_values, + entity_name_to_join_key_map=entity_name_to_join_key_map, + ) + + assert unique_entities == ( + {"entity_1": Value(int64_val=1), "entity_2": Value(string_val="1")}, + {"entity_1": Value(int64_val=2), "entity_2": Value(string_val="2")}, + ) + assert indexes == ([0, 2], [1]) diff --git a/sdk/python/tests/unit/test_usage.py b/sdk/python/tests/unit/test_usage.py new file mode 100644 index 0000000000..13988d3264 --- /dev/null +++ b/sdk/python/tests/unit/test_usage.py @@ -0,0 +1,237 @@ +import datetime +import json +import time +from unittest.mock import patch + +import pytest + +from feast.usage import ( + RatioSampler, + log_exceptions, + log_exceptions_and_usage, + set_usage_attribute, + tracing_span, +) + + +@pytest.fixture(scope="function") +def dummy_exporter(): + event_log = [] + + with patch( + "feast.usage._export", + new=lambda e: event_log.append(json.loads(json.dumps(e))), + ): + yield event_log + + +@pytest.fixture(scope="function", autouse=True) +def enabling_patch(): + with patch("feast.usage._is_enabled") as p: + p.__bool__.return_value = True + yield p + + +def test_logging_disabled(dummy_exporter, enabling_patch): + enabling_patch.__bool__.return_value = False + + @log_exceptions_and_usage(event="test-event") + def entrypoint(): + pass + + @log_exceptions(event="test-event") + def entrypoint2(): + raise ValueError(1) + + entrypoint() + with pytest.raises(ValueError): + entrypoint2() + + assert not dummy_exporter + + +def test_global_context_building(dummy_exporter): + @log_exceptions_and_usage(event="test-event") + def entrypoint(provider): + if provider == "one": + provider_one() + if provider == "two": + provider_two() + + @log_exceptions_and_usage(provider="provider-one") + def provider_one(): + dummy_layer() + + @log_exceptions_and_usage(provider="provider-two") + def provider_two(): + set_usage_attribute("new-attr", "new-val") + + @log_exceptions_and_usage + def dummy_layer(): + redis_store() + + @log_exceptions_and_usage(store="redis") + def redis_store(): + set_usage_attribute("attr", "val") + + entrypoint(provider="one") + entrypoint(provider="two") + + scope_name = "test_usage.test_global_context_building." 
+ + assert dummy_exporter + assert { + "event": "test-event", + "provider": "provider-one", + "store": "redis", + "attr": "val", + "entrypoint": f"{scope_name}.entrypoint", + }.items() <= dummy_exporter[0].items() + assert dummy_exporter[0]["calls"][0]["fn_name"] == f"{scope_name}.entrypoint" + assert dummy_exporter[0]["calls"][1]["fn_name"] == f"{scope_name}.provider_one" + assert dummy_exporter[0]["calls"][2]["fn_name"] == f"{scope_name}.dummy_layer" + assert dummy_exporter[0]["calls"][3]["fn_name"] == f"{scope_name}.redis_store" + + assert ( + not {"store", "attr"} & dummy_exporter[1].keys() + ) # check that context was reset + assert { + "event": "test-event", + "provider": "provider-two", + "new-attr": "new-val", + }.items() <= dummy_exporter[1].items() + + +def test_exception_recording(dummy_exporter): + @log_exceptions_and_usage(event="test-event") + def entrypoint(): + provider() + + @log_exceptions_and_usage(provider="provider-one") + def provider(): + raise ValueError(1) + + with pytest.raises(ValueError): + entrypoint() + + assert dummy_exporter + assert { + "event": "test-event", + "provider": "provider-one", + "exception": repr(ValueError(1)), + "entrypoint": "test_usage.test_exception_recording..entrypoint", + }.items() <= dummy_exporter[0].items() + + +def test_only_exception_logging(dummy_exporter): + @log_exceptions(scope="exception-only") + def failing_fn(): + raise ValueError(1) + + @log_exceptions_and_usage(scope="usage-and-exception") + def entrypoint(): + failing_fn() + + with pytest.raises(ValueError): + failing_fn() + + assert { + "exception": repr(ValueError(1)), + "scope": "exception-only", + "entrypoint": "test_usage.test_only_exception_logging..failing_fn", + }.items() <= dummy_exporter[0].items() + + with pytest.raises(ValueError): + entrypoint() + + assert { + "exception": repr(ValueError(1)), + "scope": "usage-and-exception", + "entrypoint": "test_usage.test_only_exception_logging..entrypoint", + }.items() <= dummy_exporter[1].items() + + +def test_ratio_based_sampling(dummy_exporter): + @log_exceptions_and_usage() + def entrypoint(): + expensive_fn() + + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.1)) + def expensive_fn(): + pass + + for _ in range(100): + entrypoint() + + assert len(dummy_exporter) == 10 + + +def test_sampling_priority(dummy_exporter): + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.3)) + def entrypoint(): + expensive_fn() + + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.01)) + def expensive_fn(): + other_fn() + + @log_exceptions_and_usage(sampler=RatioSampler(ratio=0.1)) + def other_fn(): + pass + + for _ in range(300): + entrypoint() + + assert len(dummy_exporter) == 3 + + +def test_time_recording(dummy_exporter): + @log_exceptions_and_usage() + def entrypoint(): + time.sleep(0.1) + expensive_fn() + + @log_exceptions_and_usage() + def expensive_fn(): + time.sleep(0.5) + other_fn() + + @log_exceptions_and_usage() + def other_fn(): + time.sleep(0.2) + + entrypoint() + + assert dummy_exporter + calls = dummy_exporter[0]["calls"] + assert call_length_ms(calls[0]) >= 800 + assert call_length_ms(calls[0]) > call_length_ms(calls[1]) >= 700 + assert call_length_ms(calls[1]) > call_length_ms(calls[2]) >= 200 + + +def test_profiling_decorator(dummy_exporter): + @log_exceptions_and_usage() + def entrypoint(): + with tracing_span("custom_span"): + time.sleep(0.1) + + entrypoint() + + assert dummy_exporter + + calls = dummy_exporter[0]["calls"] + assert len(calls) + assert call_length_ms(calls[0]) >= 100 + assert 
call_length_ms(calls[1]) >= 100 + + assert ( + calls[1]["fn_name"] + == "test_usage.test_profiling_decorator..entrypoint.custom_span" + ) + + +def call_length_ms(call): + return ( + datetime.datetime.fromisoformat(call["end"]) + - datetime.datetime.fromisoformat(call["start"]) + ).total_seconds() * 10 ** 3 diff --git a/sdk/python/tests/utils/data_source_utils.py b/sdk/python/tests/utils/data_source_utils.py index 3c25a697b7..d5f45964ca 100644 --- a/sdk/python/tests/utils/data_source_utils.py +++ b/sdk/python/tests/utils/data_source_utils.py @@ -1,5 +1,8 @@ import contextlib +import random import tempfile +import time +from typing import Iterator from google.cloud import bigquery @@ -8,24 +11,20 @@ @contextlib.contextmanager -def prep_file_source(df, event_timestamp_column=None) -> FileSource: +def prep_file_source(df, timestamp_field=None) -> Iterator[FileSource]: with tempfile.NamedTemporaryFile(suffix=".parquet") as f: f.close() df.to_parquet(f.name) file_source = FileSource( - file_format=ParquetFormat(), - file_url=f.name, - event_timestamp_column=event_timestamp_column, + file_format=ParquetFormat(), path=f.name, timestamp_field=timestamp_field, ) yield file_source -def simple_bq_source_using_table_ref_arg( - df, event_timestamp_column=None -) -> BigQuerySource: +def simple_bq_source_using_table_arg(df, timestamp_field=None) -> BigQuerySource: client = bigquery.Client() gcp_project = client.project - bigquery_dataset = "ds" + bigquery_dataset = f"ds_{time.time_ns()}" dataset = bigquery.Dataset(f"{gcp_project}.{bigquery_dataset}") client.create_dataset(dataset, exists_ok=True) dataset.default_table_expiration_ms = ( @@ -34,23 +33,18 @@ def simple_bq_source_using_table_ref_arg( * 60 # 60 minutes in milliseconds (seems to be minimum limit for gcloud) ) client.update_dataset(dataset, ["default_table_expiration_ms"]) - table_ref = f"{gcp_project}.{bigquery_dataset}.table_1" + table = f"{gcp_project}.{bigquery_dataset}.table_{random.randrange(100, 999)}" - job = client.load_table_from_dataframe( - df, table_ref, job_config=bigquery.LoadJobConfig() - ) + job = client.load_table_from_dataframe(df, table) job.result() - return BigQuerySource( - table_ref=table_ref, event_timestamp_column=event_timestamp_column, - ) + return BigQuerySource(table=table, timestamp_field=timestamp_field,) -def simple_bq_source_using_query_arg(df, event_timestamp_column=None) -> BigQuerySource: - bq_source_using_table_ref = simple_bq_source_using_table_ref_arg( - df, event_timestamp_column - ) +def simple_bq_source_using_query_arg(df, timestamp_field=None) -> BigQuerySource: + bq_source_using_table = simple_bq_source_using_table_arg(df, timestamp_field) return BigQuerySource( - query=f"SELECT * FROM {bq_source_using_table_ref.table_ref}", - event_timestamp_column=event_timestamp_column, + name=bq_source_using_table.table, + query=f"SELECT * FROM {bq_source_using_table.table}", + timestamp_field=timestamp_field, ) diff --git a/sdk/python/tests/utils/online_read_write_test.py b/sdk/python/tests/utils/online_read_write_test.py index 99339534f4..39846cd2ad 100644 --- a/sdk/python/tests/utils/online_read_write_test.py +++ b/sdk/python/tests/utils/online_read_write_test.py @@ -1,25 +1,28 @@ from datetime import datetime, timedelta +from typing import Optional from feast.feature_store import FeatureStore from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto -def basic_rw_test(store: FeatureStore, view_name: str) -> None: +def 
basic_rw_test( + store: FeatureStore, view_name: str, feature_service_name: Optional[str] = None +) -> None: """ This is a provider-independent test suite for reading and writing from the online store, to - be used by provider-specific tests. + be used by provider-specific tests. """ table = store.get_feature_view(name=view_name) provider = store._get_provider() entity_key = EntityKeyProto( - join_keys=["driver"], entity_values=[ValueProto(int64_val=1)] + join_keys=["driver_id"], entity_values=[ValueProto(int64_val=1)] ) def _driver_rw_test(event_ts, created_ts, write, expect_read): - """ A helper function to write values and read them back """ + """A helper function to write values and read them back""" write_lat, write_lon = write expect_lat, expect_lon = expect_read provider.online_write_batch( @@ -39,13 +42,23 @@ def _driver_rw_test(event_ts, created_ts, write, expect_read): progress=None, ) - read_rows = provider.online_read( - config=store.config, table=table, entity_keys=[entity_key] - ) - assert len(read_rows) == 1 - _, val = read_rows[0] - assert val["lon"].string_val == expect_lon - assert abs(val["lat"].double_val - expect_lat) < 1e-6 + if feature_service_name: + entity_dict = {"driver_id": 1} + feature_service = store.get_feature_service(feature_service_name) + features = store.get_online_features( + features=feature_service, entity_rows=[entity_dict] + ).to_dict() + assert len(features["driver_id"]) == 1 + assert features["lon"][0] == expect_lon + assert abs(features["lat"][0] - expect_lat) < 1e-6 + else: + read_rows = provider.online_read( + config=store.config, table=table, entity_keys=[entity_key] + ) + assert len(read_rows) == 1 + _, val = read_rows[0] + assert val["lon"].string_val == expect_lon + assert abs(val["lat"].double_val - expect_lat) < 1e-6 """ 1. Basic test: write value, read it back """ @@ -54,17 +67,6 @@ def _driver_rw_test(event_ts, created_ts, write, expect_read): event_ts=time_1, created_ts=time_1, write=(1.1, "3.1"), expect_read=(1.1, "3.1") ) - # Note: This behavior has changed for performance. We should test that older - # value can't overwrite over a newer value once we add the respective flag - """ Values with an older event_ts should overwrite newer ones """ - time_2 = datetime.utcnow() - _driver_rw_test( - event_ts=time_1 - timedelta(hours=1), - created_ts=time_2, - write=(-1000, "OLD"), - expect_read=(-1000, "OLD"), - ) - """ Values with an new event_ts should overwrite older ones """ time_3 = datetime.utcnow() _driver_rw_test( @@ -73,21 +75,3 @@ def _driver_rw_test(event_ts, created_ts, write, expect_read): write=(1123, "NEWER"), expect_read=(1123, "NEWER"), ) - - # Note: This behavior has changed for performance. 
We should test that older - # value can't overwrite over a newer value once we add the respective flag - """ created_ts is used as a tie breaker, using older created_ts here, but we still overwrite """ - _driver_rw_test( - event_ts=time_1 + timedelta(hours=1), - created_ts=time_3 - timedelta(hours=1), - write=(54321, "I HAVE AN OLDER created_ts SO I LOSE"), - expect_read=(54321, "I HAVE AN OLDER created_ts SO I LOSE"), - ) - - """ created_ts is used as a tie breaker, using newer created_ts here so we should overwrite """ - _driver_rw_test( - event_ts=time_1 + timedelta(hours=1), - created_ts=time_3 + timedelta(hours=1), - write=(96864, "I HAVE A NEWER created_ts SO I WIN"), - expect_read=(96864, "I HAVE A NEWER created_ts SO I WIN"), - ) diff --git a/sdk/python/tests/utils/online_store_utils.py b/sdk/python/tests/utils/online_store_utils.py new file mode 100644 index 0000000000..f72b4d5a2a --- /dev/null +++ b/sdk/python/tests/utils/online_store_utils.py @@ -0,0 +1,56 @@ +from datetime import datetime + +import boto3 + +from feast import utils +from feast.infra.online_stores.helpers import compute_entity_id +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto + + +def _create_n_customer_test_samples(n=10): + return [ + ( + EntityKeyProto( + join_keys=["customer"], entity_values=[ValueProto(string_val=str(i))] + ), + { + "avg_orders_day": ValueProto(float_val=1.0), + "name": ValueProto(string_val="John"), + "age": ValueProto(int64_val=3), + }, + datetime.utcnow(), + None, + ) + for i in range(n) + ] + + +def _create_test_table(project, tbl_name, region): + client = boto3.client("dynamodb", region_name=region) + client.create_table( + TableName=f"{project}.{tbl_name}", + KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "entity_id", "AttributeType": "S"}], + BillingMode="PAY_PER_REQUEST", + ) + + +def _delete_test_table(project, tbl_name, region): + client = boto3.client("dynamodb", region_name=region) + client.delete_table(TableName=f"{project}.{tbl_name}") + + +def _insert_data_test_table(data, project, tbl_name, region): + dynamodb_resource = boto3.resource("dynamodb", region_name=region) + table_instance = dynamodb_resource.Table(f"{project}.{tbl_name}") + for entity_key, features, timestamp, created_ts in data: + entity_id = compute_entity_id(entity_key) + with table_instance.batch_writer() as batch: + batch.put_item( + Item={ + "entity_id": entity_id, + "event_ts": str(utils.make_tzaware(timestamp)), + "values": {k: v.SerializeToString() for k, v in features.items()}, + } + ) diff --git a/sdk/python/tests/utils/online_write_benchmark.py b/sdk/python/tests/utils/online_write_benchmark.py index d8041e51cd..82ffc8e98b 100644 --- a/sdk/python/tests/utils/online_write_benchmark.py +++ b/sdk/python/tests/utils/online_write_benchmark.py @@ -11,11 +11,12 @@ from feast import FileSource from feast.driver_test_data import create_driver_hourly_stats_df from feast.entity import Entity -from feast.feature import Feature from feast.feature_store import FeatureStore from feast.feature_view import FeatureView +from feast.field import Field from feast.infra.provider import _convert_arrow_to_proto from feast.repo_config import RepoConfig +from feast.types import Float32, Int32 from feast.value_type import ValueType @@ -23,12 +24,12 @@ def create_driver_hourly_stats_feature_view(source): driver_stats_feature_view = FeatureView( name="driver_stats", 
entities=["driver_id"], - features=[ - Feature(name="conv_rate", dtype=ValueType.FLOAT), - Feature(name="acc_rate", dtype=ValueType.FLOAT), - Feature(name="avg_daily_trips", dtype=ValueType.INT32), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int32), ], - input=source, + source=source, ttl=timedelta(hours=2), ) return driver_stats_feature_view @@ -37,7 +38,7 @@ def create_driver_hourly_stats_feature_view(source): def create_driver_hourly_stats_source(parquet_path): return FileSource( path=parquet_path, - event_timestamp_column="datetime", + timestamp_field="event_timestamp", created_timestamp_column="created", ) diff --git a/sdk/python/usage_tests/test_usage.py b/sdk/python/usage_tests/test_usage.py deleted file mode 100644 index 3e571a2120..0000000000 --- a/sdk/python/usage_tests/test_usage.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright 2020 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import tempfile -import uuid -from datetime import datetime - -from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig -from tenacity import retry, wait_exponential, stop_after_attempt - -from google.cloud import bigquery -import os -from time import sleep - -from feast import Client, Entity, ValueType, FeatureStore, RepoConfig - - -USAGE_BIGQUERY_TABLE = ( - "kf-feast.feast_telemetry.cloudfunctions_googleapis_com_cloud_functions" -) - - -def test_usage_on_v09(mocker): - # Setup environment - old_environ = dict(os.environ) - os.environ["FEAST_IS_USAGE_TEST"] = "True" - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_FORCE_USAGE_UUID"] = test_usage_id - test_client = Client(serving_url=None, core_url=None, usage=True) - test_client.set_project("project1") - entity = Entity( - name="driver_car_id", - description="Car driver id", - value_type=ValueType.STRING, - labels={"team": "matchmaking"}, - ) - - mocker.patch.object( - test_client, "_apply_entity", return_value=None, - ) - - test_client.apply(entity) - - os.environ.clear() - os.environ.update(old_environ) - - ensure_bigquery_usage_id_with_retry(test_usage_id) - - -def test_usage_off_v09(mocker): - old_environ = dict(os.environ) - os.environ["FEAST_IS_USAGE_TEST"] = "True" - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_FORCE_USAGE_UUID"] = test_usage_id - os.environ["FEAST_USAGE"] = "False" - - test_client = Client(serving_url=None, core_url=None, usage=False) - test_client.set_project("project1") - entity = Entity( - name="driver_car_id", - description="Car driver id", - value_type=ValueType.STRING, - labels={"team": "matchmaking"}, - ) - - mocker.patch.object( - test_client, "_apply_entity", return_value=None, - ) - - test_client.apply(entity) - - os.environ.clear() - os.environ.update(old_environ) - sleep(30) - rows = read_bigquery_usage_id(test_usage_id) - assert rows.total_rows == 0 - - -def test_usage_on(): - old_environ = dict(os.environ) - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_FORCE_USAGE_UUID"] = 
test_usage_id - os.environ["FEAST_IS_USAGE_TEST"] = "True" - os.environ["FEAST_USAGE"] = "True" - - with tempfile.TemporaryDirectory() as temp_dir: - test_feature_store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="fake_project", - provider="local", - online_store=SqliteOnlineStoreConfig( - path=os.path.join(temp_dir, "online.db") - ), - ) - ) - entity = Entity( - name="driver_car_id", - description="Car driver id", - value_type=ValueType.STRING, - labels={"team": "matchmaking"}, - ) - - test_feature_store.apply([entity]) - - os.environ.clear() - os.environ.update(old_environ) - ensure_bigquery_usage_id_with_retry(test_usage_id) - - -def test_usage_off(): - old_environ = dict(os.environ) - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_IS_USAGE_TEST"] = "True" - os.environ["FEAST_USAGE"] = "False" - os.environ["FEAST_FORCE_USAGE_UUID"] = test_usage_id - - with tempfile.TemporaryDirectory() as temp_dir: - test_feature_store = FeatureStore( - config=RepoConfig( - registry=os.path.join(temp_dir, "registry.db"), - project="fake_project", - provider="local", - online_store=SqliteOnlineStoreConfig( - path=os.path.join(temp_dir, "online.db") - ), - ) - ) - entity = Entity( - name="driver_car_id", - description="Car driver id", - value_type=ValueType.STRING, - labels={"team": "matchmaking"}, - ) - test_feature_store.apply([entity]) - - os.environ.clear() - os.environ.update(old_environ) - sleep(30) - rows = read_bigquery_usage_id(test_usage_id) - assert rows.total_rows == 0 - - -def test_exception_usage_on(): - old_environ = dict(os.environ) - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_FORCE_USAGE_UUID"] = test_usage_id - os.environ["FEAST_IS_USAGE_TEST"] = "True" - os.environ["FEAST_USAGE"] = "True" - - try: - test_feature_store = FeatureStore("/tmp/non_existent_directory") - except: - pass - - os.environ.clear() - os.environ.update(old_environ) - ensure_bigquery_usage_id_with_retry(test_usage_id) - - -def test_exception_usage_off(): - old_environ = dict(os.environ) - test_usage_id = str(uuid.uuid4()) - os.environ["FEAST_IS_USAGE_TEST"] = "True" - os.environ["FEAST_USAGE"] = "False" - os.environ["FEAST_FORCE_USAGE_UUID"] = test_usage_id - - try: - test_feature_store = FeatureStore("/tmp/non_existent_directory") - except: - pass - - os.environ.clear() - os.environ.update(old_environ) - sleep(30) - rows = read_bigquery_usage_id(test_usage_id) - assert rows.total_rows == 0 - - -@retry(wait=wait_exponential(multiplier=1, min=1, max=10), stop=stop_after_attempt(7)) -def ensure_bigquery_usage_id_with_retry(usage_id): - rows = read_bigquery_usage_id(usage_id) - if rows.total_rows != 1: - raise Exception(f"Could not find usage id: {usage_id}") - - -def read_bigquery_usage_id(usage_id): - bq_client = bigquery.Client() - query = f""" - SELECT - telemetry_id - FROM ( - SELECT - JSON_EXTRACT(textPayload, '$.telemetry_id') AS telemetry_id - FROM - `{USAGE_BIGQUERY_TABLE}` - WHERE - timestamp >= TIMESTAMP(\"{datetime.utcnow().date().isoformat()}\")) - WHERE - telemetry_id = '\"{usage_id}\"' - """ - query_job = bq_client.query(query) - return query_job.result() diff --git a/ui/.babelrc.js b/ui/.babelrc.js new file mode 100644 index 0000000000..68a0192e50 --- /dev/null +++ b/ui/.babelrc.js @@ -0,0 +1,3 @@ +module.exports = { + presets: [["@babel/preset-env"], ["@babel/preset-react"]], +}; diff --git a/ui/PUBLISHING_TO_NPM.md b/ui/PUBLISHING_TO_NPM.md new file mode 100644 index 0000000000..2ad42fd227 --- /dev/null +++ b/ui/PUBLISHING_TO_NPM.md 
@@ -0,0 +1,13 @@
+# Publishing the Feast Package to NPM
+
+The Feast UI is published as a module to NPM and can be found here: https://www.npmjs.com/package/@feast-dev/feast-ui
+
+To publish a new version of the module, you will need to be part of the @feast-dev team in NPM. Ask Tony to add you if necessary. You will also need to [login to your NPM account on the command line](https://docs.npmjs.com/cli/v8/commands/npm-adduser).
+
+## Steps for Publishing
+
+1. Make sure tests are passing. Run tests with `yarn test` in the ui directory.
+2. Bump the version number in `package.json` as appropriate.
+3. Package the module for distribution. Run the library build script with `yarn build:lib`. We use [Rollup](https://rollupjs.org/) for building the module, and the configs are in the `rollup.config.js` file.
+4. Publish the package to NPM. Run `npm publish`.
+5. [Check NPM to see that the package was properly published](https://www.npmjs.com/package/@feast-dev/feast-ui).
diff --git a/ui/README.md b/ui/README.md
new file mode 100644
index 0000000000..80e20b3164
--- /dev/null
+++ b/ui/README.md
@@ -0,0 +1,150 @@
+# [WIP] Feast Web UI
+
+![Sample UI](sample.png)
+
+This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
+
+## Project structure
+The Web UI is powered by a JSON registry dump from Feast (running `feast registry-dump`). Running `yarn start` launches a UI
+powered by test data.
+- `public/` contains assets as well as demo data loaded by the Web UI.
+  - There is a `projects-list.json` which represents all Feast projects the UI shows.
+  - There is also a `registry.json` which is the registry dump for the feature repo.
+- `feature_repo/` contains a sample Feast repo which generates the `registry.json`.
+- `src/` contains the Web UI source code. This parses the registry json blob in `src/parsers` to make this data
+available for the rest of the UI.
+- `src/custom-tabs` includes sample custom tabs. This is a WIP plugin system where users can inject their own tabs and
+data into the UI.
+
+## Usage
+
+There are two modes of usage: importing the UI as a module, or running the entire build as a React app.
+
+### Importing the UI as a module
+
+This is the recommended way to use Feast UI for teams maintaining their own internal UI for their deployment of Feast.
+
+Start by bootstrapping a React app with `create-react-app`:
+
+```
+npx create-react-app your-feast-ui
+```
+
+Then, in your app folder, install Feast UI and its peer dependencies. Assuming you use yarn:
+
+```
+yarn add @feast-dev/feast-ui
+yarn add @elastic/eui @elastic/datemath @emotion/react moment prop-types inter-ui react-query react-router-dom use-query-params zod typescript query-string d3 @types/d3
+```
+
+Edit `index.js` in the React app to use Feast UI.
+
+```js
+import React from "react";
+import ReactDOM from "react-dom";
+import "./index.css";
+
+import FeastUI from "@feast-dev/feast-ui";
+import "@feast-dev/feast-ui/dist/feast-ui.css";
+
+ReactDOM.render(
+  <React.StrictMode>
+    <FeastUI />
+  </React.StrictMode>,
+  document.getElementById("root")
+);
+```
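+
+Note: for the app to have data, the JSON files the UI fetches must be served alongside it. With `create-react-app`, files in `public/` are served from the site root, so a minimal layout might look like the sketch below (`registry.json` here is assumed to be a `feast registry-dump` output, mirroring this repo's own `public/` folder):
+
+```
+your-feast-ui/
+├── public/
+│   ├── projects-list.json
+│   └── registry.json
+└── src/
+    └── index.js
+```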
+
+When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should look something like this:
+
+```json
+{
+  "projects": [
+    {
+      "name": "Credit Score Project",
+      "description": "Project for credit scoring team and associated models.",
+      "id": "credit_score_project",
+      "registryPath": "/registry.json"
+    }
+  ]
+}
+```
+
+```
+// Start the React App
+yarn start
+```
+
+#### Customization
+
+The advantage of importing Feast UI as a module is in the ease of customization. The `<FeastUI>` component exposes a `feastUIConfigs` prop through which you can customize the UI. Currently it supports a few parameters.
+
+##### Fetching the Project List
+
+You can use `projectListPromise` to provide a promise that overrides where the Feast UI fetches the project list from.
+
+```jsx
+<FeastUI
+  feastUIConfigs={{
+    projectListPromise: fetch(yourCustomPath).then((res) => {
+      return res.json();
+    })
+  }}
+/>
+```
+
+##### Custom Tabs
+
+You can add custom tabs for any of the core Feast objects through the `tabsRegistry`.
+
+```
+const tabsRegistry = {
+  RegularFeatureViewCustomTabs: [
+    {
+      label: "Custom Tab Demo", // Navigation Label for the tab
+      path: "demo-tab", // Subpath for the tab
+      Component: RFVDemoCustomTab, // a React Component
+    },
+  ]
+}
+
+<FeastUI
+  feastUIConfigs={{
+    tabsRegistry: tabsRegistry,
+  }}
+/>
+```
+
+Examples of custom tabs can be found in the `/custom-tabs` folder.
+
+### Alternative: Run this Repo
+
+If you would like to simply try things out and see how the UI works, you can run the code in this repo directly. First:
+
+### `yarn install`
+
+That will install all the dependencies that the UI needs, as well as development dependencies. Then in the project directory, you can run:
+
+### `yarn start`
+
+Runs the app in the development mode.\
+Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
+
+The page will reload if you make edits.\
+You will also see any lint errors in the console.
+
+### `yarn test`
+
+Launches the test runner in the interactive watch mode.\
+See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
+
+## On React and Create React App
+
+This project was bootstrapped with Create React App, and uses its scripts to simplify UI development. You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
+
+To learn React, check out the [React documentation](https://reactjs.org/).
diff --git a/ui/feature_repo/README.md b/ui/feature_repo/README.md
new file mode 100644
index 0000000000..db706e709e
--- /dev/null
+++ b/ui/feature_repo/README.md
@@ -0,0 +1,72 @@
+# Feast repo
+
+## Overview
+
+This pulls from the dataset used in https://github.com/feast-dev/feast-aws-credit-scoring-tutorial but adds metadata for a full set of Feast FCOs.
+
+This also adds an on demand feature view + feature services + a saved dataset.
+
+## Setting up Feast
+
+Install a dev build of Feast using pip.
+
+Clone a feast repo:
+```bash
+git clone https://github.com/feast-dev/feast.git
+```
+
+Install a dev build of feast:
+```bash
+cd feast
+pip install -e "sdk/python[dev]"
+```
+
+Then for this demo, you'll actually need to fix a bug by adding this to `type_map.py#L144`:
+```python
+if isinstance(value, np.bool_):
+    return ValueType.BOOL
+```
+
+## Test features
+We have already set up a feature repository here using test data from [data](data). Features have already been pre-materialized to a local sqlite online store. The results of `feast registry-dump` have been thrown into [registry.json](../public/registry.json).
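+
+For reference, something like the following commands, run from this directory, would reproduce that state (the materialization window is illustrative, and redirecting `feast registry-dump` assumes it prints the registry JSON to stdout):
+
+```bash
+feast apply
+feast materialize 2020-01-01T00:00:00 2021-09-01T00:00:00
+feast registry-dump > ../public/registry.json
+```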
+
+To query against this registry, you can run `test_get_features.py`:
+```bash
+python test_get_features.py
+```
+
+Output:
+```
+--- Historical features (from saved dataset) ---
+ mortgage_due credit_card_due missed_payments_1y total_wages dob_ssn event_timestamp state tax_returns_filed location_type population city zipcode
+0 741165 2944 3 71067272 19781116_7723 2021-04-12 08:12:10+00:00 MI 2424 PRIMARY 4421 WEIDMAN 48893
+1 91803 8419 0 534687864 19530219_5179 2021-04-12 10:59:42+00:00 GA 19583 PRIMARY 38542 DALTON 30721
+2 1553523 5936 0 226748453 19500806_6783 2021-04-12 15:01:12+00:00 TX 6827 PRIMARY 12902 CLEBURNE 76031
+3 976522 833 0 34796963 19931128_5771 2021-04-12 16:40:26+00:00 VA 1287 PRIMARY 2342 GLADE HILL 24092
+
+--- Online features ---
+city : ['DALTON']
+credit_card_due : [8419]
+dob_ssn : ['19530219_5179']
+location_type : ['PRIMARY']
+missed_payments_1y : [0]
+mortgage_due : [91803]
+population : [38542]
+state : ['GA']
+tax_returns_filed : [19583]
+total_wages : [534687864]
+zipcode : [30721]
+city : ['DALTON']
+credit_card_due : [8419]
+dob_ssn : ['19530219_5179']
+location_type : ['PRIMARY']
+missed_payments_1y : [0]
+mortgage_due : [91803]
+population : [38542]
+state : ['GA']
+tax_returns_filed : [19583]
+total_wages : [534687864]
+transaction_gt_last_credit_card_due : [False]
+zipcode : [30721]
+
+```
diff --git a/ui/feature_repo/data/credit_history.parquet b/ui/feature_repo/data/credit_history.parquet
new file mode 100644
index 0000000000..a368ae5bbf
Binary files /dev/null and b/ui/feature_repo/data/credit_history.parquet differ
diff --git a/ui/feature_repo/data/credit_history_sample.csv b/ui/feature_repo/data/credit_history_sample.csv
new file mode 100644
index 0000000000..7b1ffca1c1
--- /dev/null
+++ b/ui/feature_repo/data/credit_history_sample.csv
@@ -0,0 +1,1001 @@
+event_timestamp,dob_ssn,credit_card_due,mortgage_due,student_loan_due,vehicle_loan_due,hard_pulls,missed_payments_2y,missed_payments_1y,missed_payments_6m,bankruptcies,created_timestamp
+2020-04-26 18:01:04.746575,19530219_5179,8419,91803,22328,15078,0,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19781116_7723,2944,741165,2515,28605,0,3,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19931128_5771,833,976522,33000,21733,9,7,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19500806_6783,5936,1553523,48955,26219,1,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19620322_7692,1575,1067381,9501,15814,1,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19521115_8793,6263,1957370,35510,17095,3,2,2,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19860608_5766,4378,490394,34057,935,8,3,2,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19690214_3370,9794,1325678,37003,17179,3,1,1,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19710204_2474,8840,1333759,21991,27414,9,1,1,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19670810_4552,888,170320,41180,7930,6,0,2,1,1,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19570513_7405,3214,157339,42470,23401,5,3,0,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19910429_8449,9983,1236123,26476,14290,0,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19971102_8169,8330,1142638,48371,5272,6,1,2,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19470930_7727,7509,1715027,31185,1114,9,4,0,0,0,2020-04-26 
18:01:04.746575
+2020-04-26 18:01:04.746575,19961104_6715,3994,1278825,1411,24239,2,4,1,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19670424_9712,9407,179919,32748,21171,0,7,2,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19871021_4913,1397,1403895,4965,6585,0,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19990119_7587,8006,1687,476,6638,9,5,3,1,0,2020-04-26 18:01:04.746575
... (several hundred further generated rows, one `+` line each, all in the same `timestamp,entity id,nine integer columns,timestamp` shape) ...
+2020-04-26 18:01:04.746575,19681220_3284,5194,495313,9536,19568,8,6,3,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19660520_4478,9336,409207,31514,16199,3,1,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26
18:01:04.746575,19620614_5395,1041,886280,28238,18864,5,6,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19760619_8036,773,928640,32637,26272,0,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19771229_3619,8621,1614872,39152,25144,10,6,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19500307_1108,9945,807205,31198,4743,3,5,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19950101_7993,5590,921781,31315,29975,8,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19961114_4939,8737,1593853,10974,20229,9,4,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19700722_8726,9234,460934,19440,27813,7,7,3,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19700108_5078,8009,1856332,41213,17184,7,1,2,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19991004_7196,7083,1897739,31453,9842,1,5,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19610816_4407,96,93189,30436,20418,6,2,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19830805_9944,2345,199447,689,20844,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650705_5054,3373,1389531,23899,24257,9,3,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19470329_8439,1355,1589167,27197,21541,0,4,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19761109_8028,673,1144250,30278,25166,2,1,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19540411_3911,7145,1875094,16825,23831,8,4,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19460804_2445,3011,1102793,47400,2616,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19900319_7981,1216,1626849,35147,11861,7,7,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19870725_2205,1842,568650,2314,17708,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19980803_9617,4570,1979271,40700,16726,10,2,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19590204_4112,9425,857957,3491,15552,9,0,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19451230_9902,5048,1897753,27159,19113,4,5,2,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19611123_4812,6553,236812,755,21051,9,0,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19621211_1243,433,1889092,25664,11604,3,6,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20001216_9148,845,1429251,30214,17162,6,5,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19960220_6248,8597,952179,26119,2012,9,6,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19910312_9908,2506,1038384,43223,7163,8,0,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19571219_5037,8849,442563,12396,24206,8,7,1,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19871224_2096,57,957223,9197,17491,10,5,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19810219_3084,7496,1243069,42452,16849,5,3,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19910126_1593,6932,1433682,20762,13797,2,7,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19520523_6520,3037,300789,3220,18106,0,1,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19951011_3479,9721,715493,11218,19230,6,3,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19471219_2972,1631,1734631,1407,7480,7,6,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650727_7587,5163,1062466,17636,25473,10,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19600602_3915,6556,4125,26950,6633,9,3,2,1,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19601014_8062,8828,782569,2055,2757,5,6,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19870824_3762,9258,1652944,6620,23889,5,7,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19841117_1177,1538,930042,23567,6960,10,7,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19920707_1060,974,1068438,44219,13570,8,2,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19570926_9019,4437,174351,31681,27204,1,6,2,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19560615_8020,3867,1291824,29955,18006,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19590915_9394,3114,1857785,26621,13593,4,4,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19831219_5062,6524,789862,36554,112,3,0,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19651130_2322,5533,904439,31338,22897,5,5,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20011127_9138,7139,1866616,7337,6028,1,1,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19530905_2116,152,1590436,2221,10595,7,5,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19491030_6244,7751,1705222,10514,1587,3,4,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010812_4108,1188,1966998,7926,4778,5,5,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650128_3434,3396,1824712,35774,19320,3,6,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19511213_5001,2299,9350,43009,6597,2,6,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19730519_2353,1925,410394,33608,3750,5,7,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19900109_4314,1611,1153715,18359,18488,10,5,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19910123_6331,8729,1047741,22119,17971,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19450314_5717,9752,811146,7972,17551,5,0,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19710306_3906,9883,206601,14862,22460,10,5,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19490923_7590,9495,32554,4721,9632,5,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19691208_8749,6448,374838,27214,7190,0,5,2,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19880824_4131,5840,1613612,23996,12396,3,4,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19831114_9561,1831,442220,16601,15773,4,6,1,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19821124_4146,7547,1355767,24872,16435,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650122_6960,180,917345,42374,20788,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19660330_9230,4470,9744,20095,16434,7,5,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19540224_7834,7796,1158157,32776,29000,5,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19810326_9597,5215,224256,43228,9079,7,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19920816_8258,305,1947294,38570,6368,3,6,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19570612_1699,2260,1096585,47774,6667,6,2,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19870630_6668,7444,508589,47706,23146,9,3,1,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19620307_9787,2230,1603201,34309,8276,0,5,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19790603_8438,353,60510,3057,9419,2,3,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19870109_8394,3394,1569842,23180,12237,10,0,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19961005_2199,9216,1863378,20649,11856,6,5,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19461101_4080,1922,778940,33246,25736,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19880914_8361,5817,903804,36333,13942,5,1,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19651002_2255,6625,298886,48250,19426,1,5,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19910924_8883,2192,179254,23287,18439,8,3,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19820602_5920,3974,104294,29999,18635,5,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19730416_9650,8786,665980,35556,14938,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19600605_5105,5107,904221,11994,13430,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19631115_7359,8252,1839267,6795,20685,0,1,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010323_5200,7140,817546,49864,5976,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19580708_9255,6896,1163821,26204,28281,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480304_9822,2963,1861513,12534,18996,7,0,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19801030_8657,3254,41286,8703,17756,10,5,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19470713_6333,4343,1256947,46304,18087,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19640217_8776,8111,1696946,42118,25621,8,7,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20001206_3008,2086,661006,13543,13258,9,6,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19940612_3243,8331,842546,38791,25238,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19450506_6478,3933,704464,49804,10962,1,6,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19750217_5642,8101,1614291,26214,25576,7,0,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19670701_9387,9007,75203,40314,6034,8,1,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19811105_1206,7439,814885,33202,2495,5,1,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19530909_8409,4602,907716,17348,16271,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19750821_5648,6440,1127648,22739,18629,0,5,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19501006_3934,5530,389826,27489,10678,3,7,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19810420_2707,7368,1142810,16179,2707,6,4,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19571116_2108,6727,1401327,16784,21022,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19720907_8374,9681,324628,1837,27967,1,2,1,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19700914_3498,7943,658760,23393,6778,2,6,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19880609_8035,819,1103862,46581,16971,9,1,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19741007_1077,4926,1879820,43392,1773,7,7,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19780322_2677,7482,1188050,45412,5093,7,5,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650612_3982,7103,1262424,17073,15039,5,1,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19450329_4686,3919,620169,39985,29769,10,3,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19590811_6504,1628,504360,7854,3569,3,7,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19861223_5971,443,75090,43096,21324,2,4,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19680425_2532,9099,1603824,24935,4516,5,5,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19740121_9114,6521,914508,47921,6279,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890728_6556,9415,1053131,47262,3817,10,6,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19950718_6824,2382,304363,7712,29565,5,7,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19671022_7156,5077,690793,24055,20496,6,2,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19700421_7805,5659,2777,16339,23146,10,7,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19840411_7324,6243,342363,32597,24589,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19520621_8445,6924,860020,45231,419,8,5,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19701108_7020,3640,670562,39589,5186,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19991004_3003,3796,1784607,9259,28985,9,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480901_7786,3470,1882701,17839,23345,9,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19870813_4277,2343,812047,31553,29947,2,4,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19670504_9914,8281,1759049,34291,28373,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19490301_5297,2423,220404,31335,15904,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19660101_5288,814,281374,41531,26091,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480118_7612,88,1936270,49388,5912,10,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19970313_5336,2301,1975060,41338,3633,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010801_5078,9291,395400,17902,18612,7,1,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480505_9707,276,975944,30926,12930,10,3,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19530802_3718,1647,449663,9255,15492,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010110_1542,8575,1096298,31234,3493,2,5,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19950921_6689,4458,1006247,38405,5233,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19790212_5448,4628,1699509,44591,12396,2,4,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19930226_9817,2467,255662,47199,10081,10,5,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19880226_3862,1464,1081757,11296,3992,0,1,0,0,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19530527_8507,8985,18720,11392,7322,10,4,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650316_4473,7489,325128,41184,1617,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19690724_6258,7795,684614,35151,19285,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19561115_1198,8215,1262656,47243,18740,1,2,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19980103_4508,4246,528834,17594,545,8,1,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19560525_9036,3335,597973,48633,19958,10,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19581104_7373,5064,1498892,29480,24820,5,2,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19460411_2542,2845,1509647,12847,12429,6,3,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19490917_1761,9399,822897,18030,4694,8,7,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19541016_1799,8425,45114,12235,28706,3,1,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19620119_4712,5318,516917,24769,25016,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19541028_3985,3293,839599,22594,753,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19861022_5505,3773,1607163,39094,19572,3,3,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19460322_4600,5170,615064,49213,1196,9,7,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19821211_8169,3652,1636297,7588,20707,0,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19850906_7562,5719,165103,36625,6361,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19621211_3311,2788,682966,43825,25302,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19970118_3939,2846,596217,3643,5831,7,2,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19960423_8655,4724,1377417,30368,1548,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19540425_5777,1457,1739769,44341,4517,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19810519_8434,9582,1010015,25698,1496,3,2,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19751001_4398,160,348448,25119,29479,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19740529_1953,1680,97684,42157,21181,5,4,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19960623_4080,426,1386539,5885,25943,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19970811_7599,6367,454287,45268,19410,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010714_1945,6576,1968475,17778,16775,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19600622_9523,6933,753951,30507,22231,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19920710_1397,9491,919902,44652,10395,7,6,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19980807_3406,1415,767672,28627,499,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19800603_2683,1687,1700725,21181,9293,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19800129_2731,5704,594334,35778,11918,1,4,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19920908_5248,4920,623094,4750,3652,7,3,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19800611_6733,6313,613519,19796,19095,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19490630_4829,5879,1410721,44324,17700,4,4,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19740105_5509,2479,893232,42929,13452,2,7,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19640712_7824,4569,1748741,8428,20811,0,7,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19760323_5319,6725,1000252,42197,18540,5,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19670411_4288,1086,1951422,22522,3417,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19630507_6850,1373,30190,46315,3934,10,1,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19941106_1729,394,212749,2517,20261,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19551123_2122,2171,1184301,20175,819,5,0,3,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19521201_5893,839,413448,20964,24917,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19590914_9692,8637,14981,43660,2025,5,5,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19800518_2646,4400,892312,38568,7177,9,3,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19860524_9463,7801,1834236,18866,488,10,2,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480304_1618,6727,659839,16725,25039,0,6,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19510930_9524,9334,898610,29336,19956,9,0,2,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19680322_5844,3941,841584,43311,19414,0,2,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19501116_7639,528,161917,22608,13793,8,2,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19971111_4140,6484,154637,39839,6665,4,3,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19580201_3253,388,1451685,5990,10338,2,5,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19620310_2878,2579,80314,15189,29770,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19610427_1191,7119,1681691,41185,24864,0,5,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19620725_5885,9006,916396,47445,4032,10,4,3,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19690819_1041,7284,1464101,2473,22820,9,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19470112_3348,9194,17477,3349,23786,4,7,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890814_4424,8054,1642180,8383,14066,7,7,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19761208_5389,5954,1841286,12357,15167,6,0,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890920_5993,6571,482177,17217,2848,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19550201_7568,1104,1804228,41245,7381,1,2,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19511125_7324,6530,1080376,13776,25508,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19481026_2416,6750,311879,12875,11238,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19630817_5763,607,949835,10158,28462,7,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19620628_8045,3294,762639,47612,2194,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19770721_4351,5963,1474119,28168,6374,6,1,2,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19730726_3598,2716,1189462,22199,4514,2,1,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19960102_1171,505,1799895,27182,2859,1,4,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19700502_5252,1936,1885918,8380,2003,2,2,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19650219_6817,5106,1004413,13249,11398,6,0,3,1,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19840730_4201,3372,1976778,38291,25727,9,1,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19840623_8653,3766,598872,4347,13664,8,0,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19640503_2285,270,732419,19162,5160,0,5,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19740923_3905,3476,202971,12952,5152,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19931010_7223,2068,531424,35112,21721,8,0,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19830604_6588,8330,1251337,3280,27780,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19570330_2085,9844,1406283,3952,24974,9,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19500202_2810,5184,1007109,5158,8876,6,6,1,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20010522_8271,1397,820844,40346,17266,5,3,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19740613_7387,424,1967313,1675,9801,3,7,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19830427_2274,6468,144795,18200,2691,9,0,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19960804_8591,7037,1371794,29029,25302,0,0,2,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,20011201_2504,2486,1152560,38187,14591,0,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19770121_5904,5766,1962060,22914,15280,0,6,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19810707_6980,5539,1763331,35687,24235,1,6,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19761219_6586,9477,35227,49711,8845,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890828_5653,1867,1983718,31358,16873,0,3,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890724_7147,2852,1444462,24630,18980,2,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19470511_3100,9343,552793,29378,26598,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19990705_8795,893,4469,27635,28786,0,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19880607_8437,7183,1792971,35872,2475,7,4,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19761013_5348,9404,1251420,5904,14718,7,7,0,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19721017_6988,2418,1000002,12362,3265,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19560514_9117,4280,1750696,18726,5772,8,6,2,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19970827_7762,7162,1548528,46724,4638,1,3,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19990806_2169,6713,1924473,31339,10800,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19951214_6232,9422,1543,24320,7388,0,7,0,1,1,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19940822_8482,797,47006,45918,15587,7,3,3,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19980116_6512,8574,365762,1633,13851,2,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19790828_7976,7181,100095,21383,21541,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19820402_8799,326,392795,15585,14276,0,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19890316_1073,3253,1690938,28859,28250,7,2,2,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19480525_5204,3878,1462836,3099,24787,9,1,3,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19550802_1922,9296,905530,3909,22673,1,1,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19820211_3919,2101,1112244,5695,25487,3,2,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19510501_3676,9101,984109,7375,22916,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19500221_2732,3525,490135,7612,913,9,3,1,1,0,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19860101_1674,6324,1066739,46125,29634,7,5,1,0,2,2020-04-26 18:01:04.746575 +2020-04-26 18:01:04.746575,19631106_8101,9497,972912,38990,27563,1,0,0,0,0,2020-04-26 18:01:04.746575 +2020-04-26 
18:01:04.746575,19600304_9365,7788,550399,10245,6738,0,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19800728_1114,5975,639269,29985,12896,9,4,1,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19660927_5866,2833,775705,38797,12944,1,7,2,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19720513_2501,2472,51984,36396,4573,8,7,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19860118_3421,9719,1275004,13829,29357,5,7,0,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19880812_8965,7625,1066119,25437,2525,1,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19831019_9200,6027,713344,47523,13360,10,1,2,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19630908_7599,480,1715315,17704,26226,9,3,2,0,2,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19661129_2834,620,1313269,49737,2476,4,0,2,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19720728_1317,4516,793457,29978,13240,1,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19550812_6788,4561,783298,16214,29136,0,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19630409_4180,9257,410794,44362,19401,0,2,2,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19840205_5389,9971,62402,1161,7880,5,4,3,1,2,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19600319_9941,5759,605797,21698,12016,0,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19680216_4828,7133,1151678,39388,15917,0,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19761209_3409,1814,1228812,16017,29041,1,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19840409_7269,6225,679931,24794,9436,2,3,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19630424_7266,8382,320434,44844,4882,0,0,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19610129_7231,9340,1803690,38162,21139,1,3,0,1,1,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19920328_8074,8469,1889454,14610,14513,4,4,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19831221_4630,3834,183678,21806,24195,4,2,1,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19740808_2210,7603,1534448,17374,2051,9,6,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19980425_3372,439,365829,1244,26381,1,6,1,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19510702_8221,2901,377972,6665,12628,7,2,1,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19861210_1873,7114,1497898,38631,17144,5,2,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19780427_7787,2699,877131,36832,22990,3,3,1,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19560604_1575,4631,1962804,28055,28937,1,1,0,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,20000930_8267,1830,1815361,13955,25201,8,4,3,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19930221_6706,3128,1581332,25196,6536,0,1,3,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19640928_1758,8112,1904178,388,27059,1,7,0,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19820319_5187,1215,1099774,14355,11878,2,0,3,0,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19910515_1988,1399,1342082,20844,18659,3,2,0,1,0,2020-04-26 18:01:04.746575
+2020-04-26 18:01:04.746575,19620713_6239,6396,777741,48944,8429,10,0,2,1,2,2020-04-26 18:01:04.746575
diff --git a/ui/feature_repo/data/loan_table.parquet b/ui/feature_repo/data/loan_table.parquet
new file mode 100644
index 0000000000..0bdb68d8a6
Binary files /dev/null and b/ui/feature_repo/data/loan_table.parquet differ
diff --git a/ui/feature_repo/data/loan_table_sample.csv b/ui/feature_repo/data/loan_table_sample.csv
new file mode 100644
index 0000000000..6acdfc9a94
--- /dev/null
+++ b/ui/feature_repo/data/loan_table_sample.csv
@@ -0,0 +1,1001 @@
+loan_id,dob_ssn,zipcode,person_age,person_income,person_home_ownership,person_emp_length,loan_intent,loan_amnt,loan_int_rate,loan_status,event_timestamp,created_timestamp
+12208,19790429_9552,30721,24,30000,RENT,2.0,EDUCATION,3000,6.03,0,2021-07-28 17:09:19.623000+00:00,2021-07-28 17:09:19.623000+00:00
+16140,19971025_8002,48893,21,21000,RENT,2.0,EDUCATION,6250,13.85,0,2021-06-08 14:21:52.761000+00:00,2021-06-08 14:21:52.761000+00:00
+16135,19670812_9247,24092,22,19200,RENT,2.0,DEBTCONSOLIDATION,6250,7.51,1,2021-06-08 15:53:38.924000+00:00,2021-06-08 15:53:38.924000+00:00
+14945,19511027_9100,76031,24,116300,RENT,6.0,HOMEIMPROVEMENT,5000,14.96,0,2021-06-23 19:54:45.803000+00:00,2021-06-23 19:54:45.803000+00:00
+35168,19850301_1492,85358,30,66000,MORTGAGE,15.0,DEBTCONSOLIDATION,15000,11.14,0,2020-10-09 01:44:17.497000+00:00,2020-10-09 01:44:17.497000+00:00
+23054,19710919_6605,37931,24,26400,OWN,8.0,VENTURE,3000,10.59,0,2021-03-12 11:23:10.074000+00:00,2021-03-12 11:23:10.074000+00:00
+29947,19640320_6051,2703,28,48500,RENT,3.0,VENTURE,7500,12.86,0,2020-12-14 14:49:53.272000+00:00,2020-12-14 14:49:53.272000+00:00
+15853,19601117_2584,48612,22,50000,OWN,2.0,EDUCATION,5600,11.86,0,2021-06-12 06:09:26.538000+00:00,2021-06-12 06:09:26.538000+00:00
+36666,20000623_4827,56621,39,51000,OWN,12.0,EDUCATION,12000,11.34,0,2020-09-19 23:30:10.956000+00:00,2020-09-19 23:30:10.956000+00:00
+35660,19480723_9980,59262,45,85000,RENT,0.0,PERSONAL,26000,14.27,1,2020-10-02 19:14:11.023000+00:00,2020-10-02 19:14:11.023000+00:00
+13394,19750702_1909,30477,21,40000,RENT,3.0,EDUCATION,4000,7.51,0,2021-07-13 14:21:37.676000+00:00,2021-07-13 14:21:37.676000+00:00
+17964,19710418_4488,76010,23,60000,RENT,0.0,VENTURE,8000,11.14,0,2021-05-16 08:24:24.369000+00:00,2021-05-16 08:24:24.369000+00:00
+16830,19980702_5793,89403,22,40000,RENT,0.0,DEBTCONSOLIDATION,7000,10.65,0,2021-05-30 19:17:42.218000+00:00,2021-05-30 19:17:42.218000+00:00
+13546,19570301_6673,62262,22,80000,RENT,6.0,EDUCATION,4000,11.12,0,2021-07-11 15:51:50.310000+00:00,2021-07-11 15:51:50.310000+00:00
+23482,19770302_1970,91066,21,62004,RENT,0.0,VENTURE,15000,7.29,0,2021-03-07 00:27:42.490000+00:00,2021-03-07 00:27:42.490000+00:00
+37320,19780515_5157,94559,36,72500,RENT,2.0,MEDICAL,10000,7.9,0,2020-09-11 15:26:44.789000+00:00,2020-09-11 15:26:44.789000+00:00
+36960,20001207_6128,62249,39,24000,RENT,9.0,EDUCATION,5000,11.14,0,2020-09-16 05:34:08.550000+00:00,2020-09-16 05:34:08.550000+00:00
+21987,19530513_1827,43215,24,95000,MORTGAGE,3.0,DEBTCONSOLIDATION,35000,11.99,0,2021-03-26 01:46:45.334000+00:00,2021-03-26 01:46:45.334000+00:00
+31554,19551119_7032,55124,27,59004,RENT,3.0,EDUCATION,10000,16.32,0,2020-11-24 03:15:12.370000+00:00,2020-11-24 03:15:12.370000+00:00
+38535,19580830_6315,35219,56,70000,MORTGAGE,7.0,PERSONAL,15000,10.38,0,2020-08-27 03:46:47.094000+00:00,2020-08-27 03:46:47.094000+00:00
+37859,19920823_7844,6355,36,53550,RENT,1.0,MEDICAL,1600,12.21,1,2020-09-04 18:34:00.379000+00:00,2020-09-04 18:34:00.379000+00:00
+21780,19840727_3972,21502,23,91000,MORTGAGE,5.0,VENTURE,10000,16.32,0,2021-03-28 17:06:00.496000+00:00,2021-03-28 17:06:00.496000+00:00
+32289,19540228_8674,24350,34,84000,MORTGAGE,6.0,EDUCATION,2400,7.49,0,2020-11-14
18:25:06.357000+00:00,2020-11-14 18:25:06.357000+00:00 +22045,19520219_1192,85740,21,83000,MORTGAGE,5.0,PERSONAL,15000,12.84,1,2021-03-25 08:02:13.839000+00:00,2021-03-25 08:02:13.839000+00:00 +37119,19911106_2989,62889,48,66948,MORTGAGE,1.0,VENTURE,24000,10.62,0,2020-09-14 04:55:52.556000+00:00,2020-09-14 04:55:52.556000+00:00 +13444,19770902_5041,42343,26,39672,MORTGAGE,10.0,PERSONAL,11500,15.7,0,2021-07-12 23:03:56.042000+00:00,2021-07-12 23:03:56.042000+00:00 +10681,19810917_9838,48722,21,21600,MORTGAGE,2.0,DEBTCONSOLIDATION,10000,11.14,0,2021-08-17 04:15:41.912000+00:00,2021-08-17 04:15:41.912000+00:00 +36662,19560701_1098,4979,37,51000,MORTGAGE,3.0,DEBTCONSOLIDATION,12800,12.84,0,2020-09-20 00:43:35.886000+00:00,2020-09-20 00:43:35.886000+00:00 +35581,19840628_1262,33617,34,55000,MORTGAGE,4.0,EDUCATION,7000,13.49,0,2020-10-03 19:24:08.404000+00:00,2020-10-03 19:24:08.404000+00:00 +35571,19870615_1213,97520,29,40000,OWN,0.0,MEDICAL,5000,5.42,0,2020-10-03 22:27:40.730000+00:00,2020-10-03 22:27:40.730000+00:00 +20823,19691221_2001,31044,22,74000,MORTGAGE,3.0,PERSONAL,10000,11.83,1,2021-04-09 21:50:40.163000+00:00,2021-04-09 21:50:40.163000+00:00 +13566,19490701_3352,2135,26,131000,RENT,10.0,HOMEIMPROVEMENT,4000,12.84,0,2021-07-11 09:44:45.656000+00:00,2021-07-11 09:44:45.656000+00:00 +28421,19850629_2882,6280,28,24000,RENT,8.0,VENTURE,8875,10.65,1,2021-01-03 01:37:54.328000+00:00,2021-01-03 01:37:54.328000+00:00 +11632,19480301_4934,72928,23,30000,MORTGAGE,7.0,MEDICAL,4600,7.9,0,2021-08-05 01:21:09.642000+00:00,2021-08-05 01:21:09.642000+00:00 +14291,19761225_2494,63336,22,34500,RENT,2.0,MEDICAL,5000,16.89,1,2021-07-02 03:58:11.970000+00:00,2021-07-02 03:58:11.970000+00:00 +30722,19650822_8482,68505,34,42240,RENT,3.0,PERSONAL,8775,13.06,0,2020-12-04 17:45:37.952000+00:00,2020-12-04 17:45:37.952000+00:00 +27024,19540608_9214,77396,31,21000,RENT,13.0,EDUCATION,3000,15.28,0,2021-01-20 20:58:16.370000+00:00,2021-01-20 20:58:16.370000+00:00 +30714,20000424_9349,11214,27,68000,RENT,4.0,HOMEIMPROVEMENT,8700,7.88,0,2020-12-04 20:12:27.814000+00:00,2020-12-04 20:12:27.814000+00:00 +36920,19830202_7159,15957,42,50000,RENT,3.0,HOMEIMPROVEMENT,8000,10.14,0,2020-09-16 17:48:17.857000+00:00,2020-09-16 17:48:17.857000+00:00 +11769,19600828_2486,13660,21,35360,RENT,1.0,EDUCATION,2200,7.49,0,2021-08-03 07:26:40.766000+00:00,2021-08-03 07:26:40.766000+00:00 +29299,19710326_8048,38452,33,44400,MORTGAGE,3.0,MEDICAL,2000,14.22,1,2020-12-22 21:03:12.043000+00:00,2020-12-22 21:03:12.043000+00:00 +26921,19910410_8856,87415,32,24000,RENT,2.0,MEDICAL,2500,10.37,0,2021-01-22 04:28:43.335000+00:00,2021-01-22 04:28:43.335000+00:00 +19922,19720812_3570,43138,22,49000,RENT,6.0,PERSONAL,10000,16.45,1,2021-04-21 09:27:30.799000+00:00,2021-04-21 09:27:30.799000+00:00 +17495,19690625_9793,53577,22,32000,RENT,0.0,PERSONAL,7875,14.91,1,2021-05-22 07:52:22.492000+00:00,2021-05-22 07:52:22.492000+00:00 +14324,19670305_2983,1330,23,43000,MORTGAGE,3.0,VENTURE,3600,15.62,0,2021-07-01 17:52:31.292000+00:00,2021-07-01 17:52:31.292000+00:00 +11248,19720926_4616,58275,21,26400,OWN,2.0,VENTURE,4000,7.51,0,2021-08-09 22:49:02.988000+00:00,2021-08-09 22:49:02.988000+00:00 +37071,19720415_9928,62060,47,65000,MORTGAGE,21.0,HOMEIMPROVEMENT,7000,14.35,0,2020-09-14 19:36:51.724000+00:00,2020-09-14 19:36:51.724000+00:00 +19306,19461130_2059,97818,23,35000,RENT,3.0,DEBTCONSOLIDATION,10000,13.22,0,2021-04-29 05:53:30.125000+00:00,2021-04-29 05:53:30.125000+00:00 +12808,19620214_7176,7802,25,36000,MORTGAGE,3.0,MEDICAL,5300,7.49,0,2021-07-21 
01:37:00.021000+00:00,2021-07-21 01:37:00.021000+00:00 +29544,19750509_3504,89116,27,53004,MORTGAGE,10.0,EDUCATION,5600,11.86,0,2020-12-19 18:06:30.039000+00:00,2020-12-19 18:06:30.039000+00:00 +29673,19730215_7839,98040,27,54996,OWN,5.0,PERSONAL,16400,13.98,0,2020-12-18 02:38:51.024000+00:00,2020-12-18 02:38:51.024000+00:00 +16093,19740421_5101,94026,25,50988,OWN,9.0,HOMEIMPROVEMENT,4200,9.99,0,2021-06-09 04:44:30.697000+00:00,2021-06-09 04:44:30.697000+00:00 +13575,19740315_4016,60914,24,26000,RENT,0.0,PERSONAL,4200,16.0,1,2021-07-11 06:59:34.562000+00:00,2021-07-11 06:59:34.562000+00:00 +18680,19651021_5802,78640,23,45000,RENT,4.0,EDUCATION,9000,5.99,0,2021-05-07 05:23:01.777000+00:00,2021-05-07 05:23:01.777000+00:00 +17498,19830407_5547,6403,23,58560,MORTGAGE,7.0,MEDICAL,8000,12.21,0,2021-05-22 06:57:18.794000+00:00,2021-05-22 06:57:18.794000+00:00 +11834,19690820_2550,17067,25,30720,RENT,4.0,MEDICAL,2400,7.9,0,2021-08-02 11:33:40.642000+00:00,2021-08-02 11:33:40.642000+00:00 +28564,19511005_8160,72333,28,59600,RENT,2.0,HOMEIMPROVEMENT,5000,5.79,0,2021-01-01 05:53:18.056000+00:00,2021-01-01 05:53:18.056000+00:00 +18810,19740925_9648,35653,25,65000,MORTGAGE,1.0,PERSONAL,15000,11.12,0,2021-05-05 13:37:01.529000+00:00,2021-05-05 13:37:01.529000+00:00 +20524,19651005_6967,57577,25,78000,MORTGAGE,4.0,DEBTCONSOLIDATION,2400,11.86,0,2021-04-13 17:18:28.731000+00:00,2021-04-13 17:18:28.731000+00:00 +14025,19951023_3236,70598,25,42000,MORTGAGE,9.0,HOMEIMPROVEMENT,12000,9.91,0,2021-07-05 13:20:19.860000+00:00,2021-07-05 13:20:19.860000+00:00 +11890,19740629_8255,63023,22,24000,RENT,0.0,DEBTCONSOLIDATION,2450,7.9,0,2021-08-01 18:25:51.613000+00:00,2021-08-01 18:25:51.613000+00:00 +21828,19450318_8032,33597,25,36960,RENT,8.0,DEBTCONSOLIDATION,12500,14.61,1,2021-03-28 02:25:01.328000+00:00,2021-03-28 02:25:01.328000+00:00 +38454,19801222_5014,37036,59,40000,MORTGAGE,5.0,PERSONAL,3600,7.49,0,2020-08-28 04:33:26.940000+00:00,2020-08-28 04:33:26.940000+00:00 +28923,19671125_4301,5065,32,27000,RENT,2.0,MEDICAL,6000,12.73,0,2020-12-27 16:04:15.527000+00:00,2020-12-27 16:04:15.527000+00:00 +21578,19691106_6462,91221,26,125000,RENT,3.0,MEDICAL,12000,6.99,0,2021-03-31 06:53:29.496000+00:00,2021-03-31 06:53:29.496000+00:00 +30594,19821118_1872,23518,29,105500,RENT,4.0,PERSONAL,8000,7.49,0,2020-12-06 08:54:55.734000+00:00,2020-12-06 08:54:55.734000+00:00 +20908,19751202_9380,78933,24,80689,MORTGAGE,1.0,DEBTCONSOLIDATION,16000,13.22,0,2021-04-08 19:50:35.386000+00:00,2021-04-08 19:50:35.386000+00:00 +14637,19580516_5644,70174,26,34000,RENT,1.0,MEDICAL,8500,7.88,0,2021-06-27 18:07:45.466000+00:00,2021-06-27 18:07:45.466000+00:00 +36708,19941118_5554,62979,40,75000,RENT,3.0,EDUCATION,6500,7.9,0,2020-09-19 10:39:19.183000+00:00,2020-09-19 10:39:19.183000+00:00 +11498,19780924_4710,13469,26,45000,RENT,5.0,MEDICAL,1800,14.11,0,2021-08-06 18:20:34.820000+00:00,2021-08-06 18:20:34.820000+00:00 +37558,19900526_2107,92881,36,90000,MORTGAGE,13.0,EDUCATION,12000,6.62,0,2020-09-08 14:38:31.413000+00:00,2020-09-08 14:38:31.413000+00:00 +22221,20010906_1070,14424,24,98000,MORTGAGE,5.0,HOMEIMPROVEMENT,10000,11.71,0,2021-03-23 02:11:56.889000+00:00,2021-03-23 02:11:56.889000+00:00 +36390,19500129_2822,5676,39,44000,MORTGAGE,2.0,MEDICAL,8000,9.63,0,2020-09-23 11:55:51.173000+00:00,2020-09-23 11:55:51.173000+00:00 +27908,19890524_1632,35007,30,30000,RENT,3.0,EDUCATION,4500,13.92,0,2021-01-09 14:33:26.688000+00:00,2021-01-09 14:33:26.688000+00:00 +36114,19871005_8543,37033,44,37000,RENT,0.0,MEDICAL,3500,12.87,0,2020-09-27 
00:21:31.390000+00:00,2020-09-27 00:21:31.390000+00:00 +26177,19590111_3670,50479,30,72000,RENT,5.0,EDUCATION,20000,13.79,0,2021-01-31 16:04:00.442000+00:00,2021-01-31 16:04:00.442000+00:00 +26100,19640419_3706,38352,31,72000,RENT,6.0,DEBTCONSOLIDATION,21000,11.99,1,2021-02-01 15:37:15.358000+00:00,2021-02-01 15:37:15.358000+00:00 +12228,19570612_1699,48626,24,30960,RENT,5.0,MEDICAL,3000,15.62,0,2021-07-28 11:02:14.970000+00:00,2021-07-28 11:02:14.970000+00:00 +34787,19561204_2796,97032,35,48000,MORTGAGE,8.0,MEDICAL,2000,11.97,0,2020-10-13 22:17:07.144000+00:00,2020-10-13 22:17:07.144000+00:00 +28531,19550620_9829,93535,32,45000,MORTGAGE,3.0,DEBTCONSOLIDATION,5000,5.79,0,2021-01-01 15:58:58.734000+00:00,2021-01-01 15:58:58.734000+00:00 +21157,19641113_3550,54944,24,38000,OWN,6.0,MEDICAL,1000,10.08,0,2021-04-05 15:40:28.450000+00:00,2021-04-05 15:40:28.450000+00:00 +27970,19970424_7944,13167,35,17000,RENT,0.0,MEDICAL,4750,9.32,1,2021-01-08 19:35:30.263000+00:00,2021-01-08 19:35:30.263000+00:00 +16912,19910624_1983,91768,25,46000,RENT,5.0,EDUCATION,7000,10.25,0,2021-05-29 18:12:41.139000+00:00,2021-05-29 18:12:41.139000+00:00 +29832,19901116_1769,36480,28,75000,RENT,8.0,DEBTCONSOLIDATION,7200,9.88,0,2020-12-16 02:00:35.029000+00:00,2020-12-16 02:00:35.029000+00:00 +17856,19930307_9563,75023,24,60000,MORTGAGE,8.0,VENTURE,12000,9.62,0,2021-05-17 17:26:37.498000+00:00,2021-05-17 17:26:37.498000+00:00 +25214,19481123_7671,99587,24,48000,OWN,5.0,VENTURE,4400,5.99,0,2021-02-12 22:38:47.504000+00:00,2021-02-12 22:38:47.504000+00:00 +29497,19831226_5835,77272,29,52800,MORTGAGE,7.0,EDUCATION,25475,16.29,0,2020-12-20 08:29:07.974000+00:00,2020-12-20 08:29:07.974000+00:00 +15260,19760601_5360,5661,24,120000,RENT,0.0,VENTURE,5775,6.17,0,2021-06-19 19:33:17.511000+00:00,2021-06-19 19:33:17.511000+00:00 +37816,19860321_5183,62963,50,116000,MORTGAGE,22.0,PERSONAL,5000,10.25,0,2020-09-05 07:43:13.384000+00:00,2020-09-05 07:43:13.384000+00:00 +38026,19970827_4324,39095,47,187000,MORTGAGE,5.0,HOMEIMPROVEMENT,14000,8.9,0,2020-09-02 15:28:54.523000+00:00,2020-09-02 15:28:54.523000+00:00 +32636,19560501_9346,40111,28,42000,RENT,9.0,MEDICAL,12250,14.59,1,2020-11-10 08:16:18.620000+00:00,2020-11-10 08:16:18.620000+00:00 +12122,19770809_4812,52342,23,24000,RENT,2.0,MEDICAL,2800,11.49,1,2021-07-29 19:27:45.633000+00:00,2021-07-29 19:27:45.633000+00:00 +14438,19941002_9685,3766,23,43600,MORTGAGE,4.0,EDUCATION,7000,7.49,0,2021-06-30 07:00:10.767000+00:00,2021-06-30 07:00:10.767000+00:00 +15951,19750905_2595,96793,24,50000,MORTGAGE,2.0,VENTURE,12500,9.32,0,2021-06-11 00:10:45.736000+00:00,2021-06-11 00:10:45.736000+00:00 +18152,19830813_8953,13083,22,27000,RENT,6.0,DEBTCONSOLIDATION,5000,9.32,0,2021-05-13 22:53:52.627000+00:00,2021-05-13 22:53:52.627000+00:00 +20349,19620624_1342,43212,26,75873,MORTGAGE,6.0,PERSONAL,1200,10.25,0,2021-04-15 22:50:24.449000+00:00,2021-04-15 22:50:24.449000+00:00 +16662,19960313_6268,95444,25,75000,RENT,5.0,PERSONAL,6400,15.99,0,2021-06-01 22:41:09.307000+00:00,2021-06-01 22:41:09.307000+00:00 +11798,19941022_2374,98363,26,30000,MORTGAGE,0.0,EDUCATION,4000,7.14,0,2021-08-02 22:34:25.019000+00:00,2021-08-02 22:34:25.019000+00:00 +33726,19601012_5125,90608,28,91800,MORTGAGE,6.0,MEDICAL,10000,16.89,1,2020-10-27 10:50:35.008000+00:00,2020-10-27 10:50:35.008000+00:00 +30945,19550115_3170,28594,33,59000,RENT,17.0,MEDICAL,9600,9.91,0,2020-12-01 21:32:43.067000+00:00,2020-12-01 21:32:43.067000+00:00 +25354,19930815_1988,32238,23,47233,RENT,7.0,DEBTCONSOLIDATION,5000,11.36,0,2021-02-11 
03:49:14.930000+00:00,2021-02-11 03:49:14.930000+00:00 +33583,19570114_5191,34134,32,27500,OWN,1.0,VENTURE,5000,14.59,0,2020-10-29 06:35:11.280000+00:00,2020-10-29 06:35:11.280000+00:00 +35932,19730604_8828,78104,37,44000,RENT,0.0,PERSONAL,1700,10.59,1,2020-09-29 08:01:55.736000+00:00,2020-09-29 08:01:55.736000+00:00 +10065,19530901_1621,22182,21,12000,OWN,6.0,EDUCATION,3000,13.61,1,2021-08-25 00:41:41.237000+00:00,2021-08-25 00:41:41.237000+00:00 +37999,19481128_5369,93673,37,160000,MORTGAGE,14.0,HOMEIMPROVEMENT,10000,7.51,0,2020-09-02 23:44:27.805000+00:00,2020-09-02 23:44:27.805000+00:00 +12667,19661216_1173,95553,21,35004,OWN,5.0,VENTURE,1500,9.62,0,2021-07-22 20:44:53.827000+00:00,2021-07-22 20:44:53.827000+00:00 +32783,19660617_5520,52531,31,92000,MORTGAGE,2.0,HOMEIMPROVEMENT,3500,5.79,0,2020-11-08 11:18:17.417000+00:00,2020-11-08 11:18:17.417000+00:00 +18641,19530822_8321,85204,22,40000,RENT,2.0,EDUCATION,9000,10.59,0,2021-05-07 17:18:49.851000+00:00,2021-05-07 17:18:49.851000+00:00 +36453,19940823_7643,76307,43,71015,RENT,8.0,VENTURE,5000,11.14,0,2020-09-22 16:39:33.515000+00:00,2020-09-22 16:39:33.515000+00:00 +38272,19951202_6841,32777,44,61000,RENT,2.0,DEBTCONSOLIDATION,11000,16.02,1,2020-08-30 12:13:51.286000+00:00,2020-08-30 12:13:51.286000+00:00 +15711,19550503_7933,13108,26,44000,MORTGAGE,6.0,EDUCATION,10000,5.42,1,2021-06-14 01:35:41.577000+00:00,2021-06-14 01:35:41.577000+00:00 +38246,19610623_9145,53179,37,24000,RENT,2.0,DEBTCONSOLIDATION,5000,14.65,0,2020-08-30 20:11:03.335000+00:00,2020-08-30 20:11:03.335000+00:00 +31577,19871210_3692,39474,35,60000,RENT,7.0,PERSONAL,10000,11.11,0,2020-11-23 20:13:04.018000+00:00,2020-11-23 20:13:04.018000+00:00 +36675,19790922_3586,44093,40,68000,RENT,12.0,HOMEIMPROVEMENT,7000,7.51,0,2020-09-19 20:44:59.862000+00:00,2020-09-19 20:44:59.862000+00:00 +34689,19960715_8599,64506,30,71000,RENT,2.0,PERSONAL,12000,8.9,0,2020-10-15 04:15:47.946000+00:00,2020-10-15 04:15:47.946000+00:00 +30849,19660326_3937,78147,28,19200,RENT,0.0,MEDICAL,9250,14.22,1,2020-12-03 02:54:41.403000+00:00,2020-12-03 02:54:41.403000+00:00 +23291,19920505_5607,75633,26,14400,RENT,1.0,PERSONAL,1500,9.25,0,2021-03-09 10:53:17.931000+00:00,2021-03-09 10:53:17.931000+00:00 +10598,19640113_6316,31795,23,54504,RENT,1.0,PERSONAL,20000,16.7,1,2021-08-18 05:39:04.224000+00:00,2021-08-18 05:39:04.224000+00:00 +27250,19980601_1483,91977,28,80000,RENT,7.0,VENTURE,3025,9.99,0,2021-01-17 23:50:17.786000+00:00,2021-01-17 23:50:17.786000+00:00 +34031,19810306_4085,51301,28,57000,RENT,2.0,VENTURE,15000,14.11,1,2020-10-23 13:32:39.044000+00:00,2020-10-23 13:32:39.044000+00:00 +29704,19830721_3688,48470,33,36000,RENT,5.0,DEBTCONSOLIDATION,7000,5.79,1,2020-12-17 17:09:52.811000+00:00,2020-12-17 17:09:52.811000+00:00 +34918,19611012_2049,55805,29,127000,MORTGAGE,1.0,MEDICAL,24250,11.36,0,2020-10-12 06:12:45.665000+00:00,2020-10-12 06:12:45.665000+00:00 +35076,19720423_7339,2817,28,60000,MORTGAGE,5.0,PERSONAL,6000,10.99,0,2020-10-10 05:52:50.903000+00:00,2020-10-10 05:52:50.903000+00:00 +28135,19471207_8523,45695,30,93500,RENT,3.0,EDUCATION,9925,12.09,0,2021-01-06 17:07:06.872000+00:00,2021-01-06 17:07:06.872000+00:00 +27291,19800217_9841,91615,32,35000,MORTGAGE,9.0,VENTURE,12000,9.63,0,2021-01-17 11:17:47.246000+00:00,2021-01-17 11:17:47.246000+00:00 +20569,19650605_8562,32465,22,72644,MORTGAGE,3.0,PERSONAL,5900,7.49,1,2021-04-13 03:32:33.261000+00:00,2021-04-13 03:32:33.261000+00:00 +33370,19840427_4080,64746,27,106000,MORTGAGE,11.0,HOMEIMPROVEMENT,4000,11.71,0,2020-10-31 
23:44:33.839000+00:00,2020-10-31 23:44:33.839000+00:00 +18712,19710706_6125,19057,25,65000,MORTGAGE,1.0,PERSONAL,12000,7.49,0,2021-05-06 19:35:42.331000+00:00,2021-05-06 19:35:42.331000+00:00 +17231,19891012_7651,28515,26,36000,RENT,10.0,VENTURE,7500,10.59,0,2021-05-25 16:37:47.917000+00:00,2021-05-25 16:37:47.917000+00:00 +36754,19531122_7874,43783,36,41000,RENT,1.0,PERSONAL,7000,9.63,0,2020-09-18 20:35:02.481000+00:00,2020-09-18 20:35:02.481000+00:00 +34050,19970304_6968,28449,30,131004,MORTGAGE,14.0,VENTURE,5000,8.49,0,2020-10-23 07:43:55.623000+00:00,2020-10-23 07:43:55.623000+00:00 +21519,19741217_9741,77342,25,96000,RENT,1.0,HOMEIMPROVEMENT,12000,9.99,0,2021-04-01 00:56:22.224000+00:00,2021-04-01 00:56:22.224000+00:00 +20961,19530119_2516,38506,24,81996,MORTGAGE,4.0,EDUCATION,14000,7.88,0,2021-04-08 03:37:50.054000+00:00,2021-04-08 03:37:50.054000+00:00 +21544,19920310_7740,78402,24,65450,MORTGAGE,8.0,MEDICAL,5000,16.77,1,2021-03-31 17:17:31.407000+00:00,2021-03-31 17:17:31.407000+00:00 +37414,19790408_2169,72131,49,74000,MORTGAGE,20.0,DEBTCONSOLIDATION,12000,17.99,1,2020-09-10 10:41:28.918000+00:00,2020-09-10 10:41:28.918000+00:00 +24125,19540902_7647,7061,23,78000,MORTGAGE,7.0,DEBTCONSOLIDATION,4200,13.06,0,2021-02-26 19:46:09.883000+00:00,2021-02-26 19:46:09.883000+00:00 +28760,20011207_6345,85630,34,47000,OWN,8.0,VENTURE,14000,9.99,0,2020-12-29 17:55:56.453000+00:00,2020-12-29 17:55:56.453000+00:00 +34067,19470104_5144,4780,30,134500,MORTGAGE,9.0,DEBTCONSOLIDATION,24000,15.96,0,2020-10-23 02:31:54.668000+00:00,2020-10-23 02:31:54.668000+00:00 +36792,19831030_9701,61548,42,40000,RENT,0.0,DEBTCONSOLIDATION,7200,10.75,0,2020-09-18 08:57:35.639000+00:00,2020-09-18 08:57:35.639000+00:00 +19013,19590207_8723,80002,26,66000,MORTGAGE,6.0,DEBTCONSOLIDATION,7000,9.63,0,2021-05-02 23:31:11.297000+00:00,2021-05-02 23:31:11.297000+00:00 +24540,19710506_8913,29056,22,38000,MORTGAGE,6.0,DEBTCONSOLIDATION,4500,5.79,0,2021-02-21 12:49:18.324000+00:00,2021-02-21 12:49:18.324000+00:00 +17681,19580304_8660,42041,22,31200,RENT,0.0,PERSONAL,8000,14.3,1,2021-05-19 22:58:33.215000+00:00,2021-05-19 22:58:33.215000+00:00 +16069,19800304_9440,46260,21,50460,OWN,4.0,VENTURE,6000,13.49,0,2021-06-09 12:05:00.281000+00:00,2021-06-09 12:05:00.281000+00:00 +35548,19570818_6418,70749,28,54000,RENT,2.0,HOMEIMPROVEMENT,12000,15.31,1,2020-10-04 05:29:49.082000+00:00,2020-10-04 05:29:49.082000+00:00 +35379,19520406_3523,25565,32,100000,RENT,7.0,DEBTCONSOLIDATION,14400,13.35,0,2020-10-06 09:11:37.403000+00:00,2020-10-06 09:11:37.403000+00:00 +26165,19761103_7537,96161,27,66000,RENT,3.0,DEBTCONSOLIDATION,20000,10.62,0,2021-01-31 19:44:15.234000+00:00,2021-01-31 19:44:15.234000+00:00 +35769,19831110_4953,75206,39,58000,RENT,5.0,EDUCATION,21000,10.83,1,2020-10-01 09:53:36.662000+00:00,2020-10-01 09:53:36.662000+00:00 +26066,19920908_6532,27896,28,18000,OWN,2.0,MEDICAL,6350,13.57,1,2021-02-02 02:01:17.268000+00:00,2021-02-02 02:01:17.268000+00:00 +17450,19821018_1579,97301,23,58000,OWN,4.0,MEDICAL,15000,10.25,0,2021-05-22 21:38:17.962000+00:00,2021-05-22 21:38:17.962000+00:00 +38153,19630811_9728,50533,37,22800,OWN,18.0,EDUCATION,7000,6.03,0,2020-09-01 00:37:57.974000+00:00,2020-09-01 00:37:57.974000+00:00 +16275,19490917_1398,12498,24,82000,RENT,8.0,EDUCATION,7000,10.0,0,2021-06-06 21:04:06.350000+00:00,2021-06-06 21:04:06.350000+00:00 +30884,19930919_7687,60190,32,70000,RENT,1.0,DEBTCONSOLIDATION,9450,10.83,0,2020-12-02 16:12:18.259000+00:00,2020-12-02 16:12:18.259000+00:00 
+31709,19680505_6620,51439,30,75000,MORTGAGE,2.0,HOMEIMPROVEMENT,21000,14.35,0,2020-11-22 03:50:21.306000+00:00,2020-11-22 03:50:21.306000+00:00 +20274,19780130_4840,49922,24,75000,MORTGAGE,1.0,EDUCATION,2000,8.94,0,2021-04-16 21:46:56.899000+00:00,2021-04-16 21:46:56.899000+00:00 +38046,19680817_3924,72044,36,290000,MORTGAGE,6.0,PERSONAL,25000,15.65,0,2020-09-02 09:21:49.870000+00:00,2020-09-02 09:21:49.870000+00:00 +14726,19940301_5862,8092,22,45000,MORTGAGE,6.0,EDUCATION,10000,7.88,0,2021-06-26 14:54:15.758000+00:00,2021-06-26 14:54:15.758000+00:00 +36579,19601202_7706,57567,39,44500,RENT,5.0,MEDICAL,6000,7.88,0,2020-09-21 02:06:58.198000+00:00,2020-09-21 02:06:58.198000+00:00 +10232,19730313_4796,46990,24,180000,OWN,8.0,EDUCATION,21000,11.48,0,2021-08-22 21:36:35.381000+00:00,2021-08-22 21:36:35.381000+00:00 +21221,19740801_6924,19086,23,85000,MORTGAGE,1.0,VENTURE,21400,13.49,0,2021-04-04 20:05:49.560000+00:00,2021-04-04 20:05:49.560000+00:00 +13055,19810707_4516,60130,25,35220,RENT,2.0,DEBTCONSOLIDATION,10500,7.29,0,2021-07-17 22:03:35.551000+00:00,2021-07-17 22:03:35.551000+00:00 +35013,19820120_9786,44615,31,89000,RENT,1.0,MEDICAL,10000,10.99,0,2020-10-11 01:09:08.561000+00:00,2020-10-11 01:09:08.561000+00:00 +37421,19640122_4276,28390,36,33600,RENT,3.0,EDUCATION,12000,11.36,1,2020-09-10 08:33:00.289000+00:00,2020-09-10 08:33:00.289000+00:00 +31374,19951125_2412,68134,31,47000,RENT,5.0,EDUCATION,10000,11.99,0,2020-11-26 10:18:54.251000+00:00,2020-11-26 10:18:54.251000+00:00 +11780,19531111_7277,76861,23,75000,RENT,0.0,VENTURE,2200,7.9,0,2021-08-03 04:04:47.207000+00:00,2021-08-03 04:04:47.207000+00:00 +16180,19861210_1873,15234,25,60000,RENT,0.0,MEDICAL,7000,9.25,0,2021-06-08 02:07:43.454000+00:00,2021-06-08 02:07:43.454000+00:00 +23721,19591009_8668,83544,26,150000,MORTGAGE,8.0,MEDICAL,35000,15.96,0,2021-03-03 23:21:07.882000+00:00,2021-03-03 23:21:07.882000+00:00 +29689,20010219_3501,23882,27,55000,MORTGAGE,5.0,PERSONAL,5000,6.62,0,2020-12-17 21:45:11.301000+00:00,2020-12-17 21:45:11.301000+00:00 +20642,19560101_4944,22435,25,80000,MORTGAGE,4.0,MEDICAL,25000,13.49,0,2021-04-12 05:12:43.276000+00:00,2021-04-12 05:12:43.276000+00:00 +21440,19640130_1657,52031,26,86004,RENT,0.0,EDUCATION,12000,12.99,0,2021-04-02 01:06:19.605000+00:00,2021-04-02 01:06:19.605000+00:00 +25243,19970111_2119,94005,25,38000,RENT,9.0,EDUCATION,4800,11.71,0,2021-02-12 13:46:31.757000+00:00,2021-02-12 13:46:31.757000+00:00 +37463,19810929_8057,68144,38,82000,OWN,4.0,HOMEIMPROVEMENT,10000,11.99,0,2020-09-09 19:42:08.517000+00:00,2020-09-09 19:42:08.517000+00:00 +34492,19940607_9238,92027,29,250000,MORTGAGE,13.0,EDUCATION,20000,13.79,0,2020-10-17 16:31:30.782000+00:00,2020-10-17 16:31:30.782000+00:00 +15744,19510408_7251,49229,23,50000,RENT,4.0,DEBTCONSOLIDATION,6000,11.49,0,2021-06-13 15:30:00.899000+00:00,2021-06-13 15:30:00.899000+00:00 +25487,19520915_1114,97004,24,85000,RENT,5.0,MEDICAL,25000,10.62,0,2021-02-09 11:08:10.985000+00:00,2021-02-09 11:08:10.985000+00:00 +32452,19710506_3801,63440,32,80000,RENT,3.0,HOMEIMPROVEMENT,12000,6.62,0,2020-11-12 16:33:25.431000+00:00,2020-11-12 16:33:25.431000+00:00 +27098,19601130_3346,63361,33,41277,RENT,3.0,EDUCATION,3000,10.0,1,2021-01-19 22:20:05.152000+00:00,2021-01-19 22:20:05.152000+00:00 +13367,19471122_6915,12918,23,36300,RENT,0.0,VENTURE,4000,7.68,0,2021-07-13 22:37:10.958000+00:00,2021-07-13 22:37:10.958000+00:00 +17982,19901001_7173,97106,23,54000,MORTGAGE,7.0,DEBTCONSOLIDATION,5000,14.96,1,2021-05-16 02:54:02.181000+00:00,2021-05-16 02:54:02.181000+00:00 
+17861,19861201_1241,99503,24,60000,MORTGAGE,2.0,MEDICAL,9700,8.88,0,2021-05-17 15:54:51.334000+00:00,2021-05-17 15:54:51.334000+00:00
+16776,19630530_3579,17584,24,33600,RENT,0.0,PERSONAL,7000,13.61,0,2021-05-31 11:48:48.782000+00:00,2021-05-31 11:48:48.782000+00:00
+31541,19940404_4477,14519,34,57000,RENT,1.0,EDUCATION,10000,11.14,0,2020-11-24 07:13:48.395000+00:00,2020-11-24 07:13:48.395000+00:00
+27264,19850319_6145,63135,27,33600,RENT,4.0,PERSONAL,3200,11.99,0,2021-01-17 19:33:20.529000+00:00,2021-01-17 19:33:20.529000+00:00
+22606,19710526_9600,1450,23,105000,MORTGAGE,4.0,VENTURE,7200,7.51,0,2021-03-18 04:25:42.310000+00:00,2021-03-18 04:25:42.310000+00:00
+13522,19680327_2208,71306,23,67450,RENT,8.0,EDUCATION,4000,6.76,0,2021-07-11 23:12:19.894000+00:00,2021-07-11 23:12:19.894000+00:00
+38331,19660315_5107,5079,44,95000,MORTGAGE,4.0,EDUCATION,10000,10.95,0,2020-08-29 18:10:58.558000+00:00,2020-08-29 18:10:58.558000+00:00
+29254,19900816_4754,27020,28,75000,RENT,4.0,EDUCATION,6000,6.92,0,2020-12-23 10:49:07.513000+00:00,2020-12-23 10:49:07.513000+00:00
+38435,19511121_3548,3745,60,29000,RENT,0.0,HOMEIMPROVEMENT,2500,15.33,1,2020-08-28 10:22:10.361000+00:00,2020-08-28 10:22:10.361000+00:00
+33423,19771004_1938,77571,32,108202,MORTGAGE,6.0,MEDICAL,9600,13.79,0,2020-10-31 07:31:48.508000+00:00,2020-10-31 07:31:48.508000+00:00
+12897,19751229_7032,22733,26,36000,MORTGAGE,3.0,HOMEIMPROVEMENT,5400,7.51,0,2021-07-19 22:23:30.313000+00:00,2021-07-19 22:23:30.313000+00:00
+22779,19621211_3311,8518,25,109000,MORTGAGE,5.0,VENTURE,5000,8.49,0,2021-03-15 23:30:29.058000+00:00,2021-03-15 23:30:29.058000+00:00
+38173,19700917_6945,63071,46,60000,MORTGAGE,7.0,PERSONAL,3600,6.99,0,2020-08-31 18:30:53.320000+00:00,2020-08-31 18:30:53.320000+00:00
+25226,20010514_4088,68760,23,48000,OWN,8.0,VENTURE,6000,12.87,0,2021-02-12 18:58:32.712000+00:00,2021-02-12 18:58:32.712000+00:00
+12001,19571119_4668,11548,22,31200,MORTGAGE,2.0,DEBTCONSOLIDATION,1050,7.51,1,2021-07-31 08:28:34.786000+00:00,2021-07-31 08:28:34.786000+00:00
+27374,19730520_3988,62567,35,36000,OWN,1.0,VENTURE,3925,9.99,0,2021-01-16 09:54:24.935000+00:00,2021-01-16 09:54:24.935000+00:00
+23211,19951024_2900,57446,25,25000,RENT,1.0,DEBTCONSOLIDATION,1600,9.99,0,2021-03-10 11:21:36.544000+00:00,2021-03-10 11:21:36.544000+00:00
+12316,19540218_3486,40159,26,48000,RENT,4.0,MEDICAL,3000,10.14,0,2021-07-27 08:07:06.495000+00:00,2021-07-27 08:07:06.495000+00:00
+22487,19840625_1648,37923,24,100800,MORTGAGE,4.0,EDUCATION,21000,11.49,0,2021-03-19 16:49:48.998000+00:00,2021-03-19 16:49:48.998000+00:00
+28001,19690113_2819,11705,30,31000,RENT,1.0,MEDICAL,4800,13.43,0,2021-01-08 10:06:32.050000+00:00,2021-01-08 10:06:32.050000+00:00
+29190,19751225_4092,28757,27,50000,MORTGAGE,2.0,HOMEIMPROVEMENT,10750,13.48,0,2020-12-24 06:23:46.404000+00:00,2020-12-24 06:23:46.404000+00:00
+38324,19670302_8138,22565,42,75000,RENT,6.0,MEDICAL,17000,7.9,0,2020-08-29 20:19:27.187000+00:00,2020-08-29 20:19:27.187000+00:00
+31962,19790712_7638,38850,28,79000,MORTGAGE,4.0,VENTURE,2000,16.0,0,2020-11-18 22:26:49.440000+00:00,2020-11-18 22:26:49.440000+00:00
+33105,19630813_8026,30153,32,63000,RENT,11.0,EDUCATION,14000,6.91,0,2020-11-04 08:48:20.497000+00:00,2020-11-04 08:48:20.497000+00:00
+34924,19500627_4737,79097,30,219300,MORTGAGE,7.0,MEDICAL,35000,19.69,1,2020-10-12 04:22:38.269000+00:00,2020-10-12 04:22:38.269000+00:00
+38174,19490818_7594,30903,39,201000,RENT,6.0,VENTURE,13500,11.49,0,2020-08-31 18:12:32.088000+00:00,2020-08-31 18:12:32.088000+00:00
+18412,19610509_4145,13672,24,62677,MORTGAGE,2.0,MEDICAL,6000,10.2,0,2021-05-10 15:21:52.132000+00:00,2021-05-10 15:21:52.132000+00:00
+31920,19670809_3020,43462,29,60000,RENT,2.0,VENTURE,11200,14.22,0,2020-11-19 11:17:41.212000+00:00,2020-11-19 11:17:41.212000+00:00
+17567,19670311_1481,12723,26,59500,MORTGAGE,9.0,VENTURE,12000,6.62,0,2021-05-21 09:50:53.739000+00:00,2021-05-21 09:50:53.739000+00:00
+29112,19641010_2875,71030,29,50000,OWN,6.0,MEDICAL,4000,5.42,0,2020-12-25 06:15:22.553000+00:00,2020-12-25 06:15:22.553000+00:00
+16842,19660820_4704,30184,23,40000,RENT,4.0,MEDICAL,7000,11.58,0,2021-05-30 15:37:27.426000+00:00,2021-05-30 15:37:27.426000+00:00
+11392,19960903_3617,26105,21,36000,RENT,0.0,EDUCATION,1500,10.37,0,2021-08-08 02:46:05.483000+00:00,2021-08-08 02:46:05.483000+00:00
+31284,19511213_6264,77963,35,70000,OWN,6.0,VENTURE,17000,9.88,0,2020-11-27 13:50:45.191000+00:00,2020-11-27 13:50:45.191000+00:00
+10126,19530429_3321,32907,23,52000,RENT,1.0,PERSONAL,25000,13.22,1,2021-08-24 06:02:06.045000+00:00,2021-08-24 06:02:06.045000+00:00
+29078,20010225_3064,61744,30,50000,MORTGAGE,2.0,EDUCATION,3975,10.99,0,2020-12-25 16:39:24.463000+00:00,2020-12-25 16:39:24.463000+00:00
+14949,19460918_9720,55768,22,114000,RENT,2.0,VENTURE,5000,15.31,1,2021-06-23 18:41:20.872000+00:00,2021-06-23 18:41:20.872000+00:00
+23066,19890804_1120,13668,21,117000,MORTGAGE,3.0,PERSONAL,4800,14.54,0,2021-03-12 07:42:55.282000+00:00,2021-03-12 07:42:55.282000+00:00
+21248,19710629_3106,64427,26,41000,RENT,8.0,DEBTCONSOLIDATION,3000,12.42,0,2021-04-04 11:50:16.277000+00:00,2021-04-04 11:50:16.277000+00:00
+30445,19701002_3781,8901,30,24000,RENT,14.0,EDUCATION,5000,11.11,1,2020-12-08 06:29:39.402000+00:00,2020-12-08 06:29:39.402000+00:00
+16620,19620312_4269,33493,22,54000,MORTGAGE,5.0,MEDICAL,7000,10.65,0,2021-06-02 11:32:01.079000+00:00,2021-06-02 11:32:01.079000+00:00
+29454,19810930_9111,2539,33,30000,RENT,0.0,PERSONAL,6500,8.49,0,2020-12-20 21:38:20.979000+00:00,2020-12-20 21:38:20.979000+00:00
+14478,19880606_6310,62207,24,44000,MORTGAGE,8.0,EDUCATION,2000,5.79,0,2021-06-29 18:46:01.460000+00:00,2021-06-29 18:46:01.460000+00:00
+19275,19640908_3903,50650,23,68500,MORTGAGE,7.0,EDUCATION,4200,11.66,0,2021-04-29 15:22:28.337000+00:00,2021-04-29 15:22:28.337000+00:00
+27941,19521126_2255,31758,28,49000,RENT,12.0,HOMEIMPROVEMENT,4500,14.27,1,2021-01-09 04:27:46.010000+00:00,2021-01-09 04:27:46.010000+00:00
+17106,19910918_5172,56375,25,67500,RENT,2.0,EDUCATION,7200,7.88,0,2021-05-27 06:52:02.001000+00:00,2021-05-27 06:52:02.001000+00:00
+17155,19970507_5690,55448,24,56000,MORTGAGE,3.0,EDUCATION,15000,8.49,0,2021-05-26 15:52:41.600000+00:00,2021-05-26 15:52:41.600000+00:00
+30375,19590310_9111,10451,27,60000,MORTGAGE,3.0,HOMEIMPROVEMENT,10000,12.53,0,2020-12-09 03:54:25.689000+00:00,2020-12-09 03:54:25.689000+00:00
+17038,19531129_4322,30065,26,38400,RENT,2.0,MEDICAL,7200,9.91,0,2021-05-28 03:40:05.822000+00:00,2021-05-28 03:40:05.822000+00:00
+20553,19671202_7605,65473,25,82000,RENT,9.0,PERSONAL,11200,5.42,0,2021-04-13 08:26:12.984000+00:00,2021-04-13 08:26:12.984000+00:00
+10350,19610522_3933,90007,23,103000,RENT,6.0,PERSONAL,24250,15.7,0,2021-08-21 09:30:49.926000+00:00,2021-08-21 09:30:49.926000+00:00
+22845,19770601_6229,83858,23,110000,MORTGAGE,8.0,VENTURE,7500,13.72,0,2021-03-15 03:19:07.702000+00:00,2021-03-15 03:19:07.702000+00:00
+29093,19680125_9116,2467,28,50000,RENT,0.0,PERSONAL,6000,10.59,0,2020-12-25 12:04:05.973000+00:00,2020-12-25 12:04:05.973000+00:00
+27800,19921010_6299,51638,30,39962,MORTGAGE,5.0,DEBTCONSOLIDATION,3200,9.7,0,2021-01-10 23:35:39.817000+00:00,2021-01-10 23:35:39.817000+00:00
+14325,19590630_8466,87571,22,43000,OWN,5.0,VENTURE,3500,11.49,0,2021-07-01 17:34:10.059000+00:00,2021-07-01 17:34:10.059000+00:00
+21727,19701030_7635,92562,24,78000,MORTGAGE,0.0,DEBTCONSOLIDATION,19500,15.31,1,2021-03-29 09:18:45.828000+00:00,2021-03-29 09:18:45.828000+00:00
+14243,19700209_7465,93463,25,42240,OWN,10.0,HOMEIMPROVEMENT,8000,6.62,0,2021-07-02 18:39:11.138000+00:00,2021-07-02 18:39:11.138000+00:00
+13442,19980326_7550,7024,24,44000,RENT,1.0,DEBTCONSOLIDATION,4000,17.58,1,2021-07-12 23:40:38.507000+00:00,2021-07-12 23:40:38.507000+00:00
+19895,19620609_6991,98331,25,66800,MORTGAGE,3.0,DEBTCONSOLIDATION,12000,9.63,1,2021-04-21 17:43:04.081000+00:00,2021-04-21 17:43:04.081000+00:00
+11876,19530506_9333,68464,23,56000,RENT,2.0,MEDICAL,2400,8.9,0,2021-08-01 22:42:48.870000+00:00,2021-08-01 22:42:48.870000+00:00
+28213,19450216_7637,70121,27,36000,MORTGAGE,9.0,DEBTCONSOLIDATION,15200,16.35,1,2021-01-05 17:15:30.724000+00:00,2021-01-05 17:15:30.724000+00:00
+26120,19481212_5954,48325,30,20000,OWN,14.0,HOMEIMPROVEMENT,3800,6.99,0,2021-02-01 09:30:10.704000+00:00,2021-02-01 09:30:10.704000+00:00
+14753,19890330_3512,24747,21,59800,RENT,5.0,PERSONAL,5000,11.36,0,2021-06-26 06:38:42.476000+00:00,2021-06-26 06:38:42.476000+00:00
+14922,20010124_6565,73051,22,39600,MORTGAGE,6.0,DEBTCONSOLIDATION,4650,10.28,1,2021-06-24 02:56:54.154000+00:00,2021-06-24 02:56:54.154000+00:00
+27956,19560828_9073,44702,34,40000,OWN,18.0,MEDICAL,8000,11.36,0,2021-01-08 23:52:27.520000+00:00,2021-01-08 23:52:27.520000+00:00
+21080,19990112_3904,83607,24,25000,RENT,3.0,EDUCATION,3000,8.32,0,2021-04-06 15:13:43.366000+00:00,2021-04-06 15:13:43.366000+00:00
+34505,19940217_4038,87506,27,290000,MORTGAGE,0.0,MEDICAL,7875,9.99,0,2020-10-17 12:32:54.758000+00:00,2020-10-17 12:32:54.758000+00:00
+12169,19600716_5309,10583,25,17400,RENT,0.0,EDUCATION,3000,11.83,1,2021-07-29 05:05:07.698000+00:00,2021-07-29 05:05:07.698000+00:00
+12991,19930420_4807,54227,22,36480,RENT,2.0,VENTURE,10800,8.88,0,2021-07-18 17:38:14.442000+00:00,2021-07-18 17:38:14.442000+00:00
+13441,19450626_5454,17846,26,33660,MORTGAGE,0.0,DEBTCONSOLIDATION,4500,5.99,1,2021-07-12 23:58:59.740000+00:00,2021-07-12 23:58:59.740000+00:00
+28049,19880421_9596,6053,28,56000,RENT,5.0,DEBTCONSOLIDATION,4800,16.45,1,2021-01-07 19:25:32.882000+00:00,2021-01-07 19:25:32.882000+00:00
+15961,19640219_1473,27922,24,69996,RENT,0.0,HOMEIMPROVEMENT,6000,13.06,0,2021-06-10 21:07:13.409000+00:00,2021-06-10 21:07:13.409000+00:00
+30376,19460103_2268,19456,27,60000,MORTGAGE,3.0,PERSONAL,6000,11.48,0,2020-12-09 03:36:04.456000+00:00,2020-12-09 03:36:04.456000+00:00
+11183,19750506_9991,86446,23,28600,RENT,7.0,EDUCATION,1000,7.9,0,2021-08-10 18:42:03.111000+00:00,2021-08-10 18:42:03.111000+00:00
+36546,19730920_9744,85344,48,42000,MORTGAGE,3.0,DEBTCONSOLIDATION,14400,12.87,1,2020-09-21 12:12:38.876000+00:00,2020-09-21 12:12:38.876000+00:00
+17630,19900917_8780,72064,23,60000,MORTGAGE,7.0,MEDICAL,7000,7.9,0,2021-05-20 14:34:36.081000+00:00,2021-05-20 14:34:36.081000+00:00
+22782,19540425_5777,37334,25,109000,MORTGAGE,5.0,DEBTCONSOLIDATION,15000,10.25,0,2021-03-15 22:35:25.360000+00:00,2021-03-15 22:35:25.360000+00:00
+31512,19850324_5443,96781,32,73000,MORTGAGE,7.0,PERSONAL,9600,7.9,0,2020-11-24 16:06:04.142000+00:00,2020-11-24 16:06:04.142000+00:00
+10133,19790915_8427,28208,22,210000,MORTGAGE,6.0,VENTURE,16000,10.62,0,2021-08-24 03:53:37.416000+00:00,2021-08-24 03:53:37.416000+00:00
+38216,19930114_5390,18013,42,31000,MORTGAGE,5.0,DEBTCONSOLIDATION,7200,11.99,0,2020-08-31 05:21:40.316000+00:00,2020-08-31 05:21:40.316000+00:00
+37408,19810108_3374,46552,36,80000,MORTGAGE,5.0,EDUCATION,20000,10.99,0,2020-09-10 12:31:36.314000+00:00,2020-09-10 12:31:36.314000+00:00
+31199,19481020_3358,24579,29,70000,OWN,13.0,PERSONAL,25000,15.62,0,2020-11-28 15:50:49.968000+00:00,2020-11-28 15:50:49.968000+00:00
+12171,19550213_1841,3218,21,17916,RENT,1.0,MEDICAL,3000,6.0,1,2021-07-29 04:28:25.232000+00:00,2021-07-29 04:28:25.232000+00:00
+30397,19491201_5532,85280,28,60000,OTHER,5.0,PERSONAL,16000,12.18,0,2020-12-08 21:10:38.570000+00:00,2020-12-08 21:10:38.570000+00:00
+13090,19590910_7782,39648,26,54996,RENT,1.0,HOMEIMPROVEMENT,10250,7.29,0,2021-07-17 11:21:12.408000+00:00,2021-07-17 11:21:12.408000+00:00
+21326,19480815_1572,6231,23,85000,MORTGAGE,7.0,EDUCATION,20000,13.22,0,2021-04-03 11:58:40.129000+00:00,2021-04-03 11:58:40.129000+00:00
+14825,19801201_5126,52747,23,66000,RENT,7.0,VENTURE,5000,10.39,0,2021-06-25 08:37:13.723000+00:00,2021-06-25 08:37:13.723000+00:00
+20819,19490205_2993,30629,23,80000,MORTGAGE,5.0,VENTURE,20000,14.96,0,2021-04-09 23:04:05.093000+00:00,2021-04-09 23:04:05.093000+00:00
+15902,19661225_5079,77223,21,21600,RENT,6.0,EDUCATION,7000,13.98,1,2021-06-11 15:10:06.137000+00:00,2021-06-11 15:10:06.137000+00:00
+36113,20010602_1772,41189,39,36000,MORTGAGE,1.0,PERSONAL,7125,7.9,0,2020-09-27 00:39:52.623000+00:00,2020-09-27 00:39:52.623000+00:00
+16378,19720521_7275,56373,26,28000,RENT,7.0,MEDICAL,6500,7.51,0,2021-06-05 13:33:39.385000+00:00,2021-06-05 13:33:39.385000+00:00
+16198,19840409_7269,16353,24,51500,MORTGAGE,2.0,VENTURE,6300,5.79,0,2021-06-07 20:37:21.266000+00:00,2021-06-07 20:37:21.266000+00:00
+29031,19500706_4853,96113,27,40800,RENT,11.0,PERSONAL,6000,7.66,0,2020-12-26 07:02:02.399000+00:00,2020-12-26 07:02:02.399000+00:00
+19072,19890405_6360,1721,26,66504,MORTGAGE,10.0,EDUCATION,12000,12.53,0,2021-05-02 05:28:18.570000+00:00,2021-05-02 05:28:18.570000+00:00
+11519,19721205_7203,78070,23,30000,RENT,0.0,PERSONAL,1925,12.18,0,2021-08-06 11:55:08.934000+00:00,2021-08-06 11:55:08.934000+00:00
+32154,19870217_4068,99115,27,36000,RENT,2.0,DEBTCONSOLIDATION,12000,14.09,1,2020-11-16 11:42:52.767000+00:00,2020-11-16 11:42:52.767000+00:00
+19246,19940529_9085,60181,26,32000,RENT,6.0,PERSONAL,10000,6.99,1,2021-04-30 00:14:44.085000+00:00,2021-04-30 00:14:44.085000+00:00
+15929,19800423_2210,1746,24,58668,RENT,8.0,MEDICAL,6000,15.05,1,2021-06-11 06:54:32.855000+00:00,2021-06-11 06:54:32.855000+00:00
+29011,19900411_4842,54626,31,49500,MORTGAGE,3.0,DEBTCONSOLIDATION,10000,10.75,0,2020-12-26 13:09:07.052000+00:00,2020-12-26 13:09:07.052000+00:00
+18403,19830916_6494,64074,22,150000,RENT,2.0,EDUCATION,8200,6.99,0,2021-05-10 18:07:03.226000+00:00,2021-05-10 18:07:03.226000+00:00
+26363,19500514_4765,65061,27,102616,RENT,4.0,MEDICAL,18000,13.99,1,2021-01-29 07:10:11.165000+00:00,2021-01-29 07:10:11.165000+00:00
+16973,19970526_4469,62278,22,54000,RENT,1.0,EDUCATION,7000,13.47,0,2021-05-28 23:33:05.946000+00:00,2021-05-28 23:33:05.946000+00:00
+28067,19841104_8948,97292,32,41000,MORTGAGE,2.0,PERSONAL,8000,15.05,0,2021-01-07 13:55:10.694000+00:00,2021-01-07 13:55:10.694000+00:00
+29793,19490930_4839,23148,35,32000,RENT,3.0,MEDICAL,7200,12.61,0,2020-12-16 13:56:23.104000+00:00,2020-12-16 13:56:23.104000+00:00
+10157,19961109_2655,93004,26,200000,MORTGAGE,4.0,EDUCATION,15000,14.79,0,2021-08-23 20:33:07.832000+00:00,2021-08-23 20:33:07.832000+00:00
+35214,19490711_1919,64640,30,28000,OWN,5.0,HOMEIMPROVEMENT,6300,11.99,0,2020-10-08 11:40:00.794000+00:00,2020-10-08 11:40:00.794000+00:00
+32555,19890416_8777,94579,28,99996,RENT,6.0,VENTURE,12000,13.61,0,2020-11-11 09:02:58.466000+00:00,2020-11-11 09:02:58.466000+00:00
+11741,19770911_6921,28386,22,32000,RENT,0.0,DEBTCONSOLIDATION,14000,13.49,1,2021-08-03 16:00:35.281000+00:00,2021-08-03 16:00:35.281000+00:00
+36337,19620401_9648,46031,36,30000,RENT,1.0,DEBTCONSOLIDATION,5000,11.14,0,2020-09-24 04:08:36.504000+00:00,2020-09-24 04:08:36.504000+00:00
+20205,19510318_7857,5461,22,75000,MORTGAGE,6.0,PERSONAL,9000,7.14,0,2021-04-17 18:53:21.953000+00:00,2021-04-17 18:53:21.953000+00:00
+29399,19491101_4166,45232,35,52000,MORTGAGE,7.0,DEBTCONSOLIDATION,7000,6.03,0,2020-12-21 14:27:48.776000+00:00,2020-12-21 14:27:48.776000+00:00
+14590,19940408_8669,6855,26,45000,MORTGAGE,4.0,PERSONAL,11000,8.9,0,2021-06-28 08:30:23.401000+00:00,2021-06-28 08:30:23.401000+00:00
+37570,19921013_2895,4062,39,90000,MORTGAGE,15.0,VENTURE,10500,7.66,0,2020-09-08 10:58:16.621000+00:00,2020-09-08 10:58:16.621000+00:00
+36789,19931224_9100,62054,38,55000,MORTGAGE,3.0,MEDICAL,14000,13.22,0,2020-09-18 09:52:39.337000+00:00,2020-09-18 09:52:39.337000+00:00
+24780,19871122_7489,2454,23,55000,MORTGAGE,4.0,VENTURE,8000,7.49,0,2021-02-18 11:24:22.483000+00:00,2021-02-18 11:24:22.483000+00:00
+37158,19720905_6281,49938,41,36000,RENT,5.0,DEBTCONSOLIDATION,10000,6.03,0,2020-09-13 17:00:04.481000+00:00,2020-09-13 17:00:04.481000+00:00
+27105,19660329_8158,11955,28,48876,RENT,5.0,PERSONAL,3000,14.59,0,2021-01-19 20:11:36.523000+00:00,2021-01-19 20:11:36.523000+00:00
+13991,19551019_7436,93457,25,41520,OWN,9.0,MEDICAL,4400,7.29,0,2021-07-05 23:44:21.771000+00:00,2021-07-05 23:44:21.771000+00:00
+14557,19510101_5183,93458,24,44600,MORTGAGE,5.0,MEDICAL,20000,11.89,0,2021-06-28 18:36:04.079000+00:00,2021-06-28 18:36:04.079000+00:00
+13350,19470423_6247,46171,25,35000,RENT,4.0,VENTURE,4000,16.29,0,2021-07-14 03:49:11.913000+00:00,2021-07-14 03:49:11.913000+00:00
+20048,19630330_4458,76044,22,45000,RENT,6.0,PERSONAL,4000,7.9,0,2021-04-19 18:54:55.483000+00:00,2021-04-19 18:54:55.483000+00:00
+29938,19841006_5546,35772,27,45000,RENT,0.0,PERSONAL,7500,12.73,0,2020-12-14 17:35:04.366000+00:00,2020-12-14 17:35:04.366000+00:00
+24685,19830202_7128,92122,26,52000,MORTGAGE,6.0,PERSONAL,3300,10.99,0,2021-02-19 16:27:59.587000+00:00,2021-02-19 16:27:59.587000+00:00
+30901,19730906_2494,16405,27,19498,RENT,3.0,HOMEIMPROVEMENT,9575,6.62,1,2020-12-02 11:00:17.304000+00:00,2020-12-02 11:00:17.304000+00:00
+10802,19580123_5981,38305,21,36000,RENT,3.0,EDUCATION,18500,10.91,1,2021-08-15 15:14:52.759000+00:00,2021-08-15 15:14:52.759000+00:00
+20342,20001221_6791,43430,25,75600,MORTGAGE,2.0,PERSONAL,14500,9.63,0,2021-04-16 00:58:53.077000+00:00,2021-04-16 00:58:53.077000+00:00
+29909,19600309_2325,48457,27,56331,MORTGAGE,7.0,EDUCATION,10000,10.36,0,2020-12-15 02:27:20.114000+00:00,2020-12-15 02:27:20.114000+00:00
+12137,19710911_5659,29201,23,28000,RENT,2.0,VENTURE,2875,15.23,0,2021-07-29 14:52:27.143000+00:00,2021-07-29 14:52:27.143000+00:00
+26481,19460619_7783,56267,30,68000,RENT,5.0,EDUCATION,16000,14.09,0,2021-01-27 19:04:25.710000+00:00,2021-01-27 19:04:25.710000+00:00
+24890,19780229_6229,50107,23,105000,RENT,7.0,MEDICAL,7200,11.71,0,2021-02-17 01:45:26.890000+00:00,2021-02-17 01:45:26.890000+00:00
+16284,19480722_3017,49862,23,83236,RENT,6.0,EDUCATION,7000,6.92,0,2021-06-06 18:18:55.256000+00:00,2021-06-06 18:18:55.256000+00:00
+22832,19611127_2836,89112,22,110000,MORTGAGE,2.0,VENTURE,9000,11.11,0,2021-03-15 07:17:43.727000+00:00,2021-03-15 07:17:43.727000+00:00
+22103,19780106_1403,60642,25,73000,RENT,9.0,PERSONAL,13600,11.99,0,2021-03-24 14:17:42.344000+00:00,2021-03-24 14:17:42.344000+00:00
+20832,19760701_9246,55379,25,80000,MORTGAGE,10.0,EDUCATION,25000,15.31,0,2021-04-09 19:05:29.069000+00:00,2021-04-09 19:05:29.069000+00:00
+27178,19530315_5005,58220,27,34560,MORTGAGE,8.0,HOMEIMPROVEMENT,10625,16.4,0,2021-01-18 21:51:46.538000+00:00,2021-01-18 21:51:46.538000+00:00
+16421,19751117_2029,46746,22,52500,MORTGAGE,6.0,DEBTCONSOLIDATION,8000,15.23,0,2021-06-05 00:24:26.380000+00:00,2021-06-05 00:24:26.380000+00:00
+32483,19571002_1156,18417,30,84300,RENT,4.0,PERSONAL,12000,8.88,0,2020-11-12 07:04:27.219000+00:00,2020-11-12 07:04:27.219000+00:00
+26862,19840725_5252,29611,27,37200,RENT,0.0,MEDICAL,13500,10.78,1,2021-01-22 22:31:36.062000+00:00,2021-01-22 22:31:36.062000+00:00
+16393,19891024_1528,87317,26,52000,MORTGAGE,0.0,EDUCATION,3600,11.89,0,2021-06-05 08:58:20.895000+00:00,2021-06-05 08:58:20.895000+00:00
+14466,19860307_6905,37361,22,60000,RENT,6.0,EDUCATION,9000,7.14,0,2021-06-29 22:26:16.252000+00:00,2021-06-29 22:26:16.252000+00:00
+37785,19860422_2703,56588,37,110000,MORTGAGE,11.0,HOMEIMPROVEMENT,12000,7.88,0,2020-09-05 17:12:11.597000+00:00,2020-09-05 17:12:11.597000+00:00
+26586,19520901_1639,77501,30,34000,RENT,2.0,DEBTCONSOLIDATION,1500,14.61,1,2021-01-26 10:57:16.280000+00:00,2021-01-26 10:57:16.280000+00:00
+12065,19740506_1650,94115,24,32000,MORTGAGE,5.0,HOMEIMPROVEMENT,6000,5.42,0,2021-07-30 12:53:55.895000+00:00,2021-07-30 12:53:55.895000+00:00
+12005,19880906_6060,42240,26,22406,RENT,5.0,PERSONAL,2500,6.76,1,2021-07-31 07:15:09.856000+00:00,2021-07-31 07:15:09.856000+00:00
+15257,19660419_6721,36026,25,147750,RENT,3.0,HOMEIMPROVEMENT,5750,9.99,0,2021-06-19 20:28:21.210000+00:00,2021-06-19 20:28:21.210000+00:00
+35465,19500418_5864,78633,30,44760,RENT,1.0,MEDICAL,3300,11.71,0,2020-10-05 06:53:11.394000+00:00,2020-10-05 06:53:11.394000+00:00
+33343,19481214_2015,46163,27,105000,MORTGAGE,9.0,PERSONAL,24000,6.17,0,2020-11-01 08:00:07.121000+00:00,2020-11-01 08:00:07.121000+00:00
+18268,19630110_6337,36078,25,69000,RENT,1.0,HOMEIMPROVEMENT,8000,13.99,1,2021-05-12 11:24:49.637000+00:00,2021-05-12 11:24:49.637000+00:00
+23448,19511127_2917,36726,24,60000,RENT,7.0,HOMEIMPROVEMENT,15000,13.85,0,2021-03-07 10:51:44.401000+00:00,2021-03-07 10:51:44.401000+00:00
+14347,19861210_8269,64085,24,37000,MORTGAGE,7.0,DEBTCONSOLIDATION,6250,7.14,1,2021-07-01 10:50:22.940000+00:00,2021-07-01 10:50:22.940000+00:00
+25215,19790115_5596,92251,22,34000,RENT,4.0,MEDICAL,3800,18.25,1,2021-02-12 22:20:26.271000+00:00,2021-02-12 22:20:26.271000+00:00
+18710,19660816_8652,80512,24,65000,MORTGAGE,5.0,EDUCATION,9600,11.99,0,2021-05-06 20:12:24.796000+00:00,2021-05-06 20:12:24.796000+00:00
+30080,19700113_2978,75189,30,58239,MORTGAGE,3.0,VENTURE,10500,6.62,0,2020-12-12 22:08:49.327000+00:00,2020-12-12 22:08:49.327000+00:00
+21650,19850410_2023,81328,25,90000,MORTGAGE,9.0,VENTURE,5600,9.99,0,2021-03-30 08:52:00.744000+00:00,2021-03-30 08:52:00.744000+00:00
+17929,19720315_2142,44224,24,56000,RENT,2.0,MEDICAL,8000,7.51,0,2021-05-16 19:06:47.513000+00:00,2021-05-16 19:06:47.513000+00:00
+12945,19611008_2362,78723,21,50000,RENT,4.0,EDUCATION,11000,10.0,0,2021-07-19 07:42:31.145000+00:00,2021-07-19 07:42:31.145000+00:00
+25782,19831221_8581,10471,31,240000,MORTGAGE,15.0,MEDICAL,5500,5.42,0,2021-02-05 16:53:47.347000+00:00,2021-02-05 16:53:47.347000+00:00
+23702,19840309_5504,16508,23,74000,RENT,1.0,VENTURE,15000,15.65,1,2021-03-04 05:09:51.303000+00:00,2021-03-04 05:09:51.303000+00:00
+25351,19630312_4863,55386,22,24000,MORTGAGE,4.0,EDUCATION,1300,5.42,0,2021-02-11 04:44:18.628000+00:00,2021-02-11 04:44:18.628000+00:00
+10068,19470413_6684,27707,26,12000,OWN,2.0,EDUCATION,6100,7.51,1,2021-08-24 23:46:37.539000+00:00,2021-08-24 23:46:37.539000+00:00
+24348,19840413_6050,37121,25,58650,MORTGAGE,4.0,MEDICAL,20000,14.09,1,2021-02-23 23:33:14.997000+00:00,2021-02-23 23:33:14.997000+00:00
+11261,19580522_9064,7723,24,26650,MORTGAGE,6.0,MEDICAL,1000,12.53,0,2021-08-09 18:50:26.963000+00:00,2021-08-09 18:50:26.963000+00:00
+25786,19891006_3639,84057,29,240000,MORTGAGE,10.0,EDUCATION,5000,7.88,0,2021-02-05 15:40:22.416000+00:00,2021-02-05 15:40:22.416000+00:00
+24402,19900419_7120,25443,22,60000,MORTGAGE,1.0,PERSONAL,7500,11.66,0,2021-02-23 07:02:08.433000+00:00,2021-02-23 07:02:08.433000+00:00
+19880,19920216_3482,42262,21,47520,RENT,5.0,MEDICAL,10000,15.33,1,2021-04-21 22:18:22.571000+00:00,2021-04-21 22:18:22.571000+00:00
+37472,19740511_1621,75555,39,24000,RENT,0.0,HOMEIMPROVEMENT,3000,14.12,1,2020-09-09 16:56:57.423000+00:00,2020-09-09 16:56:57.423000+00:00
+32721,19481103_4053,50071,29,78000,MORTGAGE,7.0,HOMEIMPROVEMENT,18000,9.38,1,2020-11-09 06:16:13.843000+00:00,2020-11-09 06:16:13.843000+00:00
+26938,19450727_3414,47145,29,32108,RENT,4.0,VENTURE,2500,15.68,1,2021-01-21 23:16:42.379000+00:00,2021-01-21 23:16:42.379000+00:00
+13366,19611014_6261,19804,26,36000,RENT,0.0,PERSONAL,4000,10.14,0,2021-07-13 22:55:32.190000+00:00,2021-07-13 22:55:32.190000+00:00
+18873,19840121_8217,45898,23,40574,RENT,2.0,HOMEIMPROVEMENT,9500,14.74,1,2021-05-04 18:20:43.871000+00:00,2021-05-04 18:20:43.871000+00:00
+31837,19770206_8911,76448,31,77000,MORTGAGE,4.0,VENTURE,10000,14.65,0,2020-11-20 12:41:03.524000+00:00,2020-11-20 12:41:03.524000+00:00
+33028,19470804_1996,46562,33,97000,MORTGAGE,0.0,HOMEIMPROVEMENT,25000,16.0,0,2020-11-05 08:21:35.413000+00:00,2020-11-05 08:21:35.413000+00:00
+36062,19830910_8159,65804,37,55200,RENT,9.0,MEDICAL,12000,11.71,0,2020-09-27 16:15:55.489000+00:00,2020-09-27 16:15:55.489000+00:00
+11528,19980723_8466,73135,25,12000,RENT,2.0,HOMEIMPROVEMENT,2000,11.89,1,2021-08-06 09:09:57.840000+00:00,2021-08-06 09:09:57.840000+00:00
+17899,19570206_9751,46737,24,54000,RENT,3.0,MEDICAL,8000,8.07,0,2021-05-17 04:17:24.493000+00:00,2021-05-17 04:17:24.493000+00:00
+27700,19451214_8404,80444,34,30000,RENT,0.0,MEDICAL,4000,9.63,0,2021-01-12 06:11:03.084000+00:00,2021-01-12 06:11:03.084000+00:00
+36585,19740218_7612,70615,37,50000,MORTGAGE,15.0,VENTURE,8400,6.03,0,2020-09-21 00:16:50.802000+00:00,2020-09-21 00:16:50.802000+00:00
+16622,19640222_7849,28528,25,38000,RENT,3.0,VENTURE,6400,7.88,0,2021-06-02 10:55:18.614000+00:00,2021-06-02 10:55:18.614000+00:00
+14665,19810613_9053,50072,25,44000,RENT,9.0,MEDICAL,8500,14.27,1,2021-06-27 09:33:50.951000+00:00,2021-06-27 09:33:50.951000+00:00
+14795,19810903_8001,92815,24,64151,RENT,0.0,VENTURE,5000,11.86,0,2021-06-25 17:47:50.704000+00:00,2021-06-25 17:47:50.704000+00:00
+24169,19500222_6154,28119,25,78600,RENT,0.0,MEDICAL,8400,13.48,0,2021-02-26 06:18:35.645000+00:00,2021-02-26 06:18:35.645000+00:00
+21443,19760730_8887,84084,24,87000,RENT,0.0,VENTURE,12000,8.88,0,2021-04-02 00:11:15.907000+00:00,2021-04-02 00:11:15.907000+00:00
+23091,19460417_8163,44871,22,118812,MORTGAGE,6.0,MEDICAL,15000,13.35,0,2021-03-12 00:04:04.465000+00:00,2021-03-12 00:04:04.465000+00:00
+15718,19890227_7986,98418,24,50000,MORTGAGE,7.0,VENTURE,11000,16.89,0,2021-06-13 23:27:12.948000+00:00,2021-06-13 23:27:12.948000+00:00
+36790,19760509_6741,63623,37,29000,RENT,5.0,PERSONAL,7200,19.03,1,2020-09-18 09:34:18.104000+00:00,2020-09-18 09:34:18.104000+00:00
+10166,19620708_1065,98038,25,200000,MORTGAGE,9.0,PERSONAL,23050,14.35,0,2021-08-23 17:47:56.738000+00:00,2021-08-23 17:47:56.738000+00:00
+19518,19791126_8004,77272,22,70000,MORTGAGE,5.0,VENTURE,21000,8.88,0,2021-04-26 13:02:28.798000+00:00,2021-04-26 13:02:28.798000+00:00
+19034,19570504_7835,95112,21,66000,MORTGAGE,4.0,DEBTCONSOLIDATION,7000,7.88,0,2021-05-02 17:05:45.411000+00:00,2021-05-02 17:05:45.411000+00:00
+36821,19510602_3795,35011,44,48000,RENT,4.0,MEDICAL,7500,11.49,0,2020-09-18 00:05:19.892000+00:00,2020-09-18 00:05:19.892000+00:00
+23121,20000504_5024,71670,25,120000,MORTGAGE,5.0,PERSONAL,28000,7.9,0,2021-03-11 14:53:27.485000+00:00,2021-03-11 14:53:27.485000+00:00
+27812,19990405_4432,10546,31,39996,OWN,8.0,HOMEIMPROVEMENT,4000,14.35,0,2021-01-10 19:55:25.025000+00:00,2021-01-10 19:55:25.025000+00:00
+23622,19890728_4868,16621,23,56525,RENT,1.0,MEDICAL,15000,11.48,1,2021-03-05 05:38:09.916000+00:00,2021-03-05 05:38:09.916000+00:00
+27615,19710524_9794,42027,28,94000,RENT,3.0,EDUCATION,3700,7.9,0,2021-01-13 08:11:07.861000+00:00,2021-01-13 08:11:07.861000+00:00
+32686,19510116_2607,32724,31,90000,MORTGAGE,3.0,HOMEIMPROVEMENT,12000,13.48,0,2020-11-09 16:58:36.986000+00:00,2020-11-09 16:58:36.986000+00:00
+12397,19730910_6841,67731,24,34000,MORTGAGE,0.0,MEDICAL,10000,12.18,0,2021-07-26 07:20:26.649000+00:00,2021-07-26 07:20:26.649000+00:00
+33709,19571101_6901,23430,31,120000,MORTGAGE,6.0,MEDICAL,28000,8.9,0,2020-10-27 16:02:35.964000+00:00,2020-10-27 16:02:35.964000+00:00
+28174,19990624_4086,99218,28,42000,MORTGAGE,0.0,DEBTCONSOLIDATION,12000,9.62,0,2021-01-06 05:11:18.798000+00:00,2021-01-06 05:11:18.798000+00:00
+17240,19830714_7087,24646,23,38000,RENT,0.0,MEDICAL,7500,7.88,0,2021-05-25 13:52:36.823000+00:00,2021-05-25 13:52:36.823000+00:00
+34812,19770703_7036,1742,33,72000,MORTGAGE,6.0,VENTURE,4250,5.42,0,2020-10-13 14:38:16.328000+00:00,2020-10-13 14:38:16.328000+00:00
+28863,19630703_1257,43164,29,48000,OWN,6.0,HOMEIMPROVEMENT,8000,13.49,0,2020-12-28 10:25:29.488000+00:00,2020-12-28 10:25:29.488000+00:00
+35849,19610525_6509,28519,36,130000,RENT,0.0,MEDICAL,18000,11.71,0,2020-09-30 09:25:18.048000+00:00,2020-09-30 09:25:18.048000+00:00
+20315,19821014_7092,88061,24,69000,MORTGAGE,1.0,EDUCATION,4900,10.96,1,2021-04-16 09:14:26.359000+00:00,2021-04-16 09:14:26.359000+00:00
+14993,19640106_7239,64501,21,32000,RENT,5.0,PERSONAL,5325,14.27,0,2021-06-23 05:13:46.635000+00:00,2021-06-23 05:13:46.635000+00:00
+20875,19761107_4249,94515,26,40000,RENT,2.0,MEDICAL,12000,7.51,0,2021-04-09 05:56:16.064000+00:00,2021-04-09 05:56:16.064000+00:00
+26988,19900812_7043,44230,32,115000,RENT,1.0,HOMEIMPROVEMENT,2500,11.89,0,2021-01-21 07:59:00.746000+00:00,2021-01-21 07:59:00.746000+00:00
+34122,19460406_6958,16405,31,138000,MORTGAGE,15.0,MEDICAL,24000,10.62,0,2020-10-22 09:42:26.871000+00:00,2020-10-22 09:42:26.871000+00:00
+13091,19750501_8888,45223,25,60000,RENT,3.0,HOMEIMPROVEMENT,10250,6.62,0,2021-07-17 11:02:51.175000+00:00,2021-07-17 11:02:51.175000+00:00
+22690,19780925_4583,97346,26,30000,OWN,3.0,EDUCATION,7500,11.14,0,2021-03-17 02:43:58.766000+00:00,2021-03-17 02:43:58.766000+00:00
+34289,19470214_1107,85018,30,150000,OWN,14.0,PERSONAL,24000,11.83,0,2020-10-20 06:37:21.015000+00:00,2020-10-20 06:37:21.015000+00:00
+17865,19760702_5710,34610,23,60000,MORTGAGE,6.0,EDUCATION,7000,6.54,0,2021-05-17 14:41:26.403000+00:00,2021-05-17 14:41:26.403000+00:00
+20761,19811225_1473,43319,26,34000,RENT,2.0,PERSONAL,12000,14.96,1,2021-04-10 16:48:36.588000+00:00,2021-04-10 16:48:36.588000+00:00
+36689,19500812_8568,70441,36,52000,MORTGAGE,2.0,HOMEIMPROVEMENT,8000,7.51,0,2020-09-19 16:28:02.604000+00:00,2020-09-19 16:28:02.604000+00:00
+16563,19920716_8577,30511,22,65700,RENT,7.0,PERSONAL,6800,10.37,0,2021-06-03 04:58:11.341000+00:00,2021-06-03 04:58:11.341000+00:00
+29878,19571110_3708,82414,33,56000,MORTGAGE,2.0,PERSONAL,11500,7.14,0,2020-12-15 11:56:18.327000+00:00,2020-12-15 11:56:18.327000+00:00
+36431,19880615_5861,68758,36,45000,MORTGAGE,2.0,EDUCATION,7000,7.14,0,2020-09-22 23:23:20.633000+00:00,2020-09-22 23:23:20.633000+00:00
+32107,19951230_5395,60511,27,39996,OWN,1.0,VENTURE,8000,12.53,0,2020-11-17 02:05:30.703000+00:00,2020-11-17 02:05:30.703000+00:00
+17532,19720730_1867,16360,21,53000,MORTGAGE,4.0,PERSONAL,14400,7.49,1,2021-05-21 20:33:16.883000+00:00,2021-05-21 20:33:16.883000+00:00
+13562,19960924_1552,25136,21,40000,OWN,0.0,EDUCATION,2500,14.27,0,2021-07-11 10:58:10.587000+00:00,2021-07-11 10:58:10.587000+00:00
+19719,19500928_5441,22432,25,42000,RENT,3.0,VENTURE,10000,13.8,1,2021-04-23 23:33:21.031000+00:00,2021-04-23 23:33:21.031000+00:00
+36375,19581226_6191,21218,41,45600,RENT,18.0,MEDICAL,5000,6.17,0,2020-09-23 16:31:09.663000+00:00,2020-09-23 16:31:09.663000+00:00
+24876,19900319_2888,1831,24,24000,RENT,2.0,EDUCATION,3500,15.65,1,2021-02-17 06:02:24.147000+00:00,2021-02-17 06:02:24.147000+00:00
+16906,19661112_4480,84412,23,55000,MORTGAGE,0.0,EDUCATION,11075,13.49,0,2021-05-29 20:02:48.535000+00:00,2021-05-29 20:02:48.535000+00:00
+24505,19880205_4971,39154,25,45000,RENT,0.0,EDUCATION,5000,10.74,0,2021-02-21 23:31:41.468000+00:00,2021-02-21 23:31:41.468000+00:00
+10438,19960423_9503,16218,21,18701,MORTGAGE,0.0,PERSONAL,4200,14.11,1,2021-08-20 06:35:41.451000+00:00,2021-08-20 06:35:41.451000+00:00
+17289,19471015_6100,47855,24,57000,MORTGAGE,8.0,VENTURE,14000,14.91,0,2021-05-24 22:53:16.422000+00:00,2021-05-24 22:53:16.422000+00:00
+17337,19800821_6712,29676,25,57200,MORTGAGE,1.0,EDUCATION,2400,7.49,0,2021-05-24 08:12:17.254000+00:00,2021-05-24 08:12:17.254000+00:00
+13901,19840428_4196,60012,22,34900,MORTGAGE,4.0,DEBTCONSOLIDATION,3600,15.7,1,2021-07-07 03:16:12.711000+00:00,2021-07-07 03:16:12.711000+00:00
+21063,19841010_2032,19956,25,83004,MORTGAGE,9.0,PERSONAL,5000,6.76,0,2021-04-06 20:25:44.322000+00:00,2021-04-06 20:25:44.322000+00:00
+13997,19720304_9112,46542,24,41600,MORTGAGE,0.0,HOMEIMPROVEMENT,10000,8.94,0,2021-07-05 21:54:14.375000+00:00,2021-07-05 21:54:14.375000+00:00
+33573,19650225_6508,1803,27,113800,MORTGAGE,3.0,MEDICAL,7500,8.63,0,2020-10-29 09:38:43.607000+00:00,2020-10-29 09:38:43.607000+00:00
+20179,19710409_9932,23141,26,75000,MORTGAGE,2.0,VENTURE,4000,5.79,0,2021-04-18 02:50:34.003000+00:00,2021-04-18 02:50:34.003000+00:00
+35335,19910201_3603,92168,28,55000,OWN,13.0,MEDICAL,18000,7.9,0,2020-10-06 22:39:11.641000+00:00,2020-10-06 22:39:11.641000+00:00
+29175,19990930_1921,36590,32,22800,RENT,1.0,VENTURE,7125,15.28,1,2020-12-24 10:59:04.894000+00:00,2020-12-24 10:59:04.894000+00:00
+32136,20000822_3089,74883,30,34000,RENT,0.0,DEBTCONSOLIDATION,12000,12.87,1,2020-11-16 17:13:14.955000+00:00,2020-11-16 17:13:14.955000+00:00
+11296,19680501_4466,25067,24,36000,RENT,5.0,EDUCATION,1200,11.49,0,2021-08-09 08:08:03.819000+00:00,2021-08-09 08:08:03.819000+00:00
+23907,19980203_5138,55053,24,94000,RENT,8.0,HOMEIMPROVEMENT,16000,11.89,0,2021-03-01 14:27:18.605000+00:00,2021-03-01 14:27:18.605000+00:00
+31204,19800518_5838,19343,29,70000,MORTGAGE,6.0,DEBTCONSOLIDATION,4200,10.99,0,2020-11-28 14:19:03.805000+00:00,2020-11-28 14:19:03.805000+00:00
+35244,19581115_1767,29584,27,56000,RENT,2.0,PERSONAL,14000,10.65,0,2020-10-08 02:29:23.814000+00:00,2020-10-08 02:29:23.814000+00:00
+11670,19570409_1102,93620,24,40000,RENT,2.0,PERSONAL,2000,11.99,0,2021-08-04 13:43:42.800000+00:00,2021-08-04 13:43:42.800000+00:00
+31479,19860121_9051,44454,29,72000,MORTGAGE,9.0,HOMEIMPROVEMENT,1000,7.68,0,2020-11-25 02:11:44.820000+00:00,2020-11-25 02:11:44.820000+00:00
+12097,19620606_9691,66048,24,32000,MORTGAGE,7.0,VENTURE,4500,6.76,0,2021-07-30 03:06:36.450000+00:00,2021-07-30 03:06:36.450000+00:00
+32116,19730127_4796,44212,28,39000,RENT,0.0,DEBTCONSOLIDATION,12000,12.53,1,2020-11-16 23:20:19.609000+00:00,2020-11-16 23:20:19.609000+00:00
+26212,19890714_9960,46017,27,88000,RENT,4.0,DEBTCONSOLIDATION,20000,10.38,0,2021-01-31 05:21:37.298000+00:00,2021-01-31 05:21:37.298000+00:00
+16506,19880310_6939,71138,24,56000,RENT,2.0,VENTURE,6650,12.69,0,2021-06-03 22:24:21.603000+00:00,2021-06-03 22:24:21.603000+00:00
+28847,19980909_4940,56289,31,99000,RENT,0.0,HOMEIMPROVEMENT,5675,13.23,0,2020-12-28 15:19:09.210000+00:00,2020-12-28 15:19:09.210000+00:00
+19818,19480503_8590,30523,23,50000,RENT,5.0,MEDICAL,10000,8.94,0,2021-04-22 17:16:18.997000+00:00,2021-04-22 17:16:18.997000+00:00
+23261,19960913_5222,6825,24,108000,MORTGAGE,1.0,DEBTCONSOLIDATION,25000,13.99,1,2021-03-09 20:03:54.911000+00:00,2021-03-09 20:03:54.911000+00:00
+26687,19990304_9642,80962,29,120000,RENT,3.0,DEBTCONSOLIDATION,15000,9.63,0,2021-01-25 04:03:31.780000+00:00,2021-01-25 04:03:31.780000+00:00
+26025,19560520_4839,94140,30,105000,RENT,4.0,VENTURE,24250,15.65,0,2021-02-02 14:33:47.808000+00:00,2021-02-02 14:33:47.808000+00:00
+25886,19610324_6600,48422,33,190000,MORTGAGE,10.0,HOMEIMPROVEMENT,20000,14.11,0,2021-02-04 09:04:59.149000+00:00,2021-02-04 09:04:59.149000+00:00
+24136,19460514_4704,8731,24,49000,MORTGAGE,0.0,HOMEIMPROVEMENT,5000,12.18,1,2021-02-26 16:24:16.324000+00:00,2021-02-26 16:24:16.324000+00:00
+21465,19960605_9058,4084,23,90000,RENT,0.0,MEDICAL,12000,8.9,0,2021-04-01 17:27:28.788000+00:00,2021-04-01 17:27:28.788000+00:00
+27031,19700913_8016,27976,33,33000,MORTGAGE,3.0,EDUCATION,4000,12.69,0,2021-01-20 18:49:47.741000+00:00,2021-01-20 18:49:47.741000+00:00
+22437,19700826_5706,11432,22,140000,RENT,0.0,MEDICAL,14000,13.11,0,2021-03-20 08:07:30.632000+00:00,2021-03-20 08:07:30.632000+00:00
+34817,19900307_2768,93454,30,70000,MORTGAGE,9.0,PERSONAL,7000,8.9,0,2020-10-13 13:06:30.164000+00:00,2020-10-13 13:06:30.164000+00:00
+31488,19500116_9170,73542,28,53000,RENT,2.0,MEDICAL,10000,11.89,0,2020-11-24 23:26:33.726000+00:00,2020-11-24 23:26:33.726000+00:00
+30308,19721125_2866,32937,32,51400,RENT,1.0,HOMEIMPROVEMENT,8000,7.51,0,2020-12-10 00:24:08.278000+00:00,2020-12-10 00:24:08.278000+00:00
+34066,19500520_6295,74940,29,134000,MORTGAGE,2.0,PERSONAL,8000,14.22,0,2020-10-23 02:50:15.900000+00:00,2020-10-23 02:50:15.900000+00:00
+22163,19630229_2611,51450,21,96000,OWN,1.0,EDUCATION,7000,12.53,0,2021-03-23 19:56:28.383000+00:00,2021-03-23 19:56:28.383000+00:00
+35605,19920416_1122,31560,31,101000,RENT,2.0,VENTURE,7500,13.22,0,2020-10-03 12:03:38.820000+00:00,2020-10-03 12:03:38.820000+00:00
+27500,19671218_8697,97236,27,48000,RENT,3.0,DEBTCONSOLIDATION,10800,15.99,1,2021-01-14 19:21:49.618000+00:00,2021-01-14 19:21:49.618000+00:00
+22741,19850929_8843,13602,26,108000,MORTGAGE,4.0,HOMEIMPROVEMENT,7200,9.62,0,2021-03-16 11:07:55.900000+00:00,2021-03-16 11:07:55.900000+00:00
+27742,19831229_8608,75075,29,39000,MORTGAGE,12.0,PERSONAL,16000,5.99,0,2021-01-11 17:20:11.312000+00:00,2021-01-11 17:20:11.312000+00:00
+31379,19710625_7661,59635,34,72000,OWN,2.0,MEDICAL,13150,6.03,0,2020-11-26 08:47:08.087000+00:00,2020-11-26 08:47:08.087000+00:00
+21787,19911227_3438,30411,24,91000,MORTGAGE,5.0,MEDICAL,15000,7.88,0,2021-03-28 14:57:31.868000+00:00,2021-03-28 14:57:31.868000+00:00
+31465,19950807_4776,29669,28,72000,MORTGAGE,12.0,VENTURE,15000,8.94,0,2020-11-25 06:28:42.078000+00:00,2020-11-25 06:28:42.078000+00:00
+30096,19491228_4585,51501,28,58800,MORTGAGE,11.0,VENTURE,5500,6.76,0,2020-12-12 17:15:09.604000+00:00,2020-12-12 17:15:09.604000+00:00
+17327,19560119_4823,3833,26,57006,MORTGAGE,10.0,DEBTCONSOLIDATION,6000,6.17,0,2021-05-24 11:15:49.580000+00:00,2021-05-24 11:15:49.580000+00:00
+12886,19500617_4745,12209,24,36000,MORTGAGE,8.0,VENTURE,3250,6.39,0,2021-07-20 01:45:23.872000+00:00,2021-07-20 01:45:23.872000+00:00
+32167,19550723_3969,62448,28,81000,MORTGAGE,11.0,HOMEIMPROVEMENT,6000,6.17,0,2020-11-16 07:44:16.743000+00:00,2020-11-16 07:44:16.743000+00:00
+38018,19650210_2955,35759,47,178000,OWN,31.0,VENTURE,9000,10.99,0,2020-09-02 17:55:44.384000+00:00,2020-09-02 17:55:44.384000+00:00
+21338,19580107_6181,12570,22,85000,MORTGAGE,1.0,MEDICAL,25000,12.53,0,2021-04-03 08:18:25.337000+00:00,2021-04-03 08:18:25.337000+00:00
+10373,19830416_6544,23607,26,62004,RENT,9.0,DEBTCONSOLIDATION,24000,7.9,1,2021-08-21 02:28:41.575000+00:00,2021-08-21 02:28:41.575000+00:00
+30494,19991106_4360,38821,29,55000,MORTGAGE,0.0,VENTURE,5000,12.73,1,2020-12-07 15:30:19.001000+00:00,2020-12-07 15:30:19.001000+00:00
+30699,19510622_4732,77661,27,32000,RENT,2.0,MEDICAL,8600,16.02,1,2020-12-05 00:47:46.304000+00:00,2020-12-05 00:47:46.304000+00:00
+33511,19580407_9371,10466,29,110000,MORTGAGE,0.0,VENTURE,15000,8.94,0,2020-10-30 04:36:40.033000+00:00,2020-10-30 04:36:40.033000+00:00
+11298,19510715_6024,75074,21,40000,RENT,5.0,VENTURE,1200,15.58,0,2021-08-09 07:31:21.354000+00:00,2021-08-09 07:31:21.354000+00:00
+34538,19760509_7255,18421,31,741600,MORTGAGE,3.0,MEDICAL,12000,10.25,0,2020-10-17 02:27:14.080000+00:00,2020-10-17 02:27:14.080000+00:00
+31930,19920610_7089,54530,33,70000,RENT,0.0,EDUCATION,11200,8.94,0,2020-11-19 08:14:08.886000+00:00,2020-11-19 08:14:08.886000+00:00
+26170,19540422_2550,71351,27,70000,RENT,4.0,EDUCATION,20000,11.48,0,2021-01-31 18:12:29.071000+00:00,2021-01-31 18:12:29.071000+00:00
+37566,19910914_9213,12926,38,90000,MORTGAGE,1.0,DEBTCONSOLIDATION,7000,6.99,0,2020-09-08 12:11:41.552000+00:00,2020-09-08 12:11:41.552000+00:00
+31122,19500614_6573,92377,28,63000,MORTGAGE,5.0,DEBTCONSOLIDATION,9000,13.72,1,2020-11-29 15:24:04.884000+00:00,2020-11-29 15:24:04.884000+00:00
+19653,19620513_7237,50006,26,48000,RENT,10.0,MEDICAL,4200,6.92,0,2021-04-24 19:44:42.388000+00:00,2021-04-24 19:44:42.388000+00:00
+27209,19910530_1082,37414,31,64800,RENT,3.0,VENTURE,3000,13.98,0,2021-01-18 12:22:48.325000+00:00,2021-01-18 12:22:48.325000+00:00
+11811,19460609_4577,80918,24,20000,RENT,0.0,DEBTCONSOLIDATION,2400,15.62,1,2021-08-02 18:35:48.994000+00:00,2021-08-02 18:35:48.994000+00:00
+22899,19600605_5105,36869,22,110196,MORTGAGE,6.0,MEDICAL,25000,11.49,0,2021-03-14 10:48:01.138000+00:00,2021-03-14 10:48:01.138000+00:00
+28894,19500811_3625,12734,28,20400,RENT,12.0,EDUCATION,6000,6.03,0,2020-12-28 00:56:31.275000+00:00,2020-12-28 00:56:31.275000+00:00
+28255,19780609_3754,16507,27,40000,RENT,1.0,MEDICAL,5000,11.99,0,2021-01-05 04:24:38.952000+00:00,2021-01-05 04:24:38.952000+00:00
+17647,19520419_7564,13440,23,29000,RENT,7.0,EDUCATION,8000,12.73,1,2021-05-20 09:22:35.126000+00:00,2021-05-20 09:22:35.126000+00:00
+20977,19450704_8207,43616,26,82000,MORTGAGE,10.0,PERSONAL,3200,5.99,0,2021-04-07 22:44:10.331000+00:00,2021-04-07 22:44:10.331000+00:00
+16341,19960102_1171,94558,22,50724,RENT,7.0,EDUCATION,6300,15.96,0,2021-06-06 00:52:44.994000+00:00,2021-06-06 00:52:44.994000+00:00
+29489,19770822_2710,42722,28,59000,RENT,0.0,HOMEIMPROVEMENT,6500,16.35,1,2020-12-20 10:55:57.836000+00:00,2020-12-20 10:55:57.836000+00:00
+34030,19580820_4102,38574,28,130000,MORTGAGE,1.0,EDUCATION,5600,15.65,0,2020-10-23 13:51:00.276000+00:00,2020-10-23 13:51:00.276000+00:00
+13605,19580503_9191,95961,22,60000,RENT,0.0,EDUCATION,4400,15.23,0,2021-07-10 21:48:57.582000+00:00,2021-07-10 21:48:57.582000+00:00
+23938,19700122_3062,49831,23,195000,MORTGAGE,0.0,EDUCATION,20000,9.63,0,2021-03-01 04:58:20.392000+00:00,2021-03-01 04:58:20.392000+00:00
+21377,19510407_5225,37880,23,86000,MORTGAGE,5.0,EDUCATION,21000,8.9,0,2021-04-02 20:22:37.263000+00:00,2021-04-02 20:22:37.263000+00:00
+38398,19820419_1456,78335,57,15000,MORTGAGE,1.0,DEBTCONSOLIDATION,3000,12.68,1,2020-08-28 21:41:15.969000+00:00,2020-08-28 21:41:15.969000+00:00
+27634,19690506_2859,12477,28,55000,RENT,2.0,MEDICAL,3900,11.99,0,2021-01-13 02:22:24.440000+00:00,2021-01-13 02:22:24.440000+00:00
+33424,19461113_2724,35582,35,108717,MORTGAGE,9.0,DEBTCONSOLIDATION,16000,6.99,0,2020-10-31 07:13:27.275000+00:00,2020-10-31 07:13:27.275000+00:00
+27118,19551107_5811,14784,32,46000,RENT,0.0,HOMEIMPROVEMENT,3000,16.29,1,2021-01-19 16:13:00.499000+00:00,2021-01-19 16:13:00.499000+00:00
+23593,19511001_8249,26555,26,138000,MORTGAGE,0.0,DEBTCONSOLIDATION,7500,6.03,0,2021-03-05 14:30:25.664000+00:00,2021-03-05 14:30:25.664000+00:00
+31240,19821005_6770,23306,27,70000,OWN,2.0,HOMEIMPROVEMENT,2250,10.36,0,2020-11-28 03:18:19.429000+00:00,2020-11-28 03:18:19.429000+00:00
+36446,19990823_8068,98324,37,59000,RENT,0.0,PERSONAL,5000,14.84,1,2020-09-22 18:48:02.143000+00:00,2020-09-22 18:48:02.143000+00:00
+19106,19880528_1268,36776,23,67000,MORTGAGE,2.0,DEBTCONSOLIDATION,5600,7.49,0,2021-05-01 19:04:16.659000+00:00,2021-05-01 19:04:16.659000+00:00
+37298,19471127_3941,33016,37,65000,RENT,2.0,DEBTCONSOLIDATION,10000,5.42,0,2020-09-11 22:10:31.908000+00:00,2020-09-11 22:10:31.908000+00:00
+26505,19970319_6443,48383,29,38000,RENT,5.0,EDUCATION,1000,7.88,0,2021-01-27 11:43:56.126000+00:00,2021-01-27 11:43:56.126000+00:00
+33734,19760924_9148,32082,27,50000,RENT,1.0,VENTURE,15000,13.16,0,2020-10-27 08:23:45.147000+00:00,2020-10-27 08:23:45.147000+00:00
+17662,19490806_4167,21843,24,60000,MORTGAGE,8.0,EDUCATION,3000,10.65,0,2021-05-20 04:47:16.636000+00:00,2021-05-20 04:47:16.636000+00:00
+31701,19951225_8760,37769,29,75000,MORTGAGE,3.0,PERSONAL,4000,10.75,0,2020-11-22 06:17:11.167000+00:00,2020-11-22 06:17:11.167000+00:00
+38617,19581220_3576,44901,56,70000,MORTGAGE,7.0,PERSONAL,15000,10.38,0,2020-08-26 02:41:46.014000+00:00,2020-08-26 02:41:46.014000+00:00
+12757,19790318_1562,87402,22,38400,RENT,4.0,EDUCATION,3500,13.99,0,2021-07-21 17:13:02.887000+00:00,2021-07-21 17:13:02.887000+00:00
+29516,19561122_2280,88013,34,53000,MORTGAGE,7.0,DEBTCONSOLIDATION,3100,7.9,0,2020-12-20 02:40:24.553000+00:00,2020-12-20 02:40:24.553000+00:00
+17790,19551025_5350,30147,26,45000,RENT,3.0,PERSONAL,8000,15.33,0,2021-05-18 13:37:58.854000+00:00,2021-05-18 13:37:58.854000+00:00
+25573,19490418_3424,62015,22,48000,RENT,1.0,EDUCATION,7200,9.62,0,2021-02-08 08:49:44.975000+00:00,2021-02-08 08:49:44.975000+00:00
+12336,19551112_1917,45140,23,52000,RENT,0.0,EDUCATION,3000,5.99,0,2021-07-27 02:00:01.842000+00:00,2021-07-27 02:00:01.842000+00:00
+17995,19850730_1705,17229,22,60000,MORTGAGE,6.0,DEBTCONSOLIDATION,6000,13.11,0,2021-05-15 22:55:26.156000+00:00,2021-05-15 22:55:26.156000+00:00
+30548,19590901_6830,40701,30,73008,RENT,5.0,VENTURE,8000,9.99,0,2020-12-06 22:59:12.437000+00:00,2020-12-06 22:59:12.437000+00:00
+24898,19451008_8370,53922,22,24432,RENT,0.0,EDUCATION,2000,11.36,0,2021-02-16 23:18:37.028000+00:00,2021-02-16 23:18:37.028000+00:00
+16570,19751112_5580,52561,25,70000,RENT,5.0,HOMEIMPROVEMENT,6800,5.42,0,2021-06-03 02:49:42.712000+00:00,2021-06-03 02:49:42.712000+00:00
+15119,19520314_5139,40052,26,51600,RENT,10.0,HOMEIMPROVEMENT,5500,11.99,1,2021-06-21 14:41:11.318000+00:00,2021-06-21 14:41:11.318000+00:00
+13412,19480802_2047,57567,22,42000,RENT,3.0,MEDICAL,4000,8.88,0,2021-07-13 08:51:15.488000+00:00,2021-07-13 08:51:15.488000+00:00
+36819,19771227_8119,54626,39,57000,MORTGAGE,16.0,EDUCATION,6300,10.99,0,2020-09-18 00:42:02.357000+00:00,2020-09-18 00:42:02.357000+00:00
+26659,19610123_4370,66213,31,29760,MORTGAGE,0.0,MEDICAL,10000,9.63,0,2021-01-25 12:37:26.295000+00:00,2021-01-25 12:37:26.295000+00:00
+38476,19830614_1921,40555,63,37000,RENT,24.0,DEBTCONSOLIDATION,5450,15.96,1,2020-08-27 21:49:39.821000+00:00,2020-08-27 21:49:39.821000+00:00
+30342,19701009_8821,42754,27,56400,RENT,4.0,PERSONAL,8000,16.77,0,2020-12-09 14:00:06.367000+00:00,2020-12-09 14:00:06.367000+00:00
+34965,19700808_4478,54474,29,200000,RENT,14.0,PERSONAL,16000,12.69,0,2020-10-11 15:50:07.729000+00:00,2020-10-11 15:50:07.729000+00:00
+16461,20010813_9747,60517,26,46800,MORTGAGE,0.0,MEDICAL,5000,12.87,1,2021-06-04 12:10:17.074000+00:00,2021-06-04 12:10:17.074000+00:00
+37598,19560911_1699,48005,39,90000,MORTGAGE,9.0,HOMEIMPROVEMENT,22000,12.67,0,2020-09-08 02:24:22.106000+00:00,2020-09-08 02:24:22.106000+00:00
+38537,19761123_3267,36785,56,71884,OWN,11.0,PERSONAL,7425,9.99,0,2020-08-27 03:10:04.628000+00:00,2020-08-27 03:10:04.628000+00:00
+32384,19680305_2152,62549,29,62050,MORTGAGE,13.0,MEDICAL,3600,17.99,1,2020-11-13 13:21:29.253000+00:00,2020-11-13 13:21:29.253000+00:00
+24366,19971215_9589,36037,23,84000,RENT,0.0,EDUCATION,12500,6.54,0,2021-02-23 18:02:52.809000+00:00,2021-02-23 18:02:52.809000+00:00
+11360,19600923_9625,60184,23,30000,RENT,1.0,DEBTCONSOLIDATION,1500,7.51,0,2021-08-08 12:33:24.928000+00:00,2021-08-08 12:33:24.928000+00:00
+21274,19681017_9396,92513,22,42000,RENT,6.0,PERSONAL,3000,11.49,0,2021-04-04 03:53:04.228000+00:00,2021-04-04 03:53:04.228000+00:00
+33972,19870929_1651,22737,27,116899,MORTGAGE,12.0,DEBTCONSOLIDATION,12000,10.75,1,2020-10-24 07:35:31.771000+00:00,2020-10-24 07:35:31.771000+00:00
+30960,20010721_2412,71419,30,71400,RENT,0.0,HOMEIMPROVEMENT,9600,12.61,0,2020-12-01 16:57:24.576000+00:00,2020-12-01 16:57:24.576000+00:00
+26218,19610525_5134,67570,27,81500,RENT,2.0,HOMEIMPROVEMENT,20000,17.51,1,2021-01-31 03:31:29.902000+00:00,2021-01-31 03:31:29.902000+00:00
+34312,19980726_4104,71369,30,151800,MORTGAGE,14.0,HOMEIMPROVEMENT,17525,12.72,0,2020-10-19 23:35:12.663000+00:00,2020-10-19 23:35:12.663000+00:00
+22738,19990517_7961,24366,22,35000,RENT,0.0,VENTURE,2000,12.99,0,2021-03-16 12:02:59.598000+00:00,2021-03-16 12:02:59.598000+00:00
+27480,19950315_2466,12546,34,93000,RENT,11.0,HOMEIMPROVEMENT,11000,12.53,0,2021-01-15 01:28:54.272000+00:00,2021-01-15 01:28:54.272000+00:00
+34799,19791129_7157,47142,33,103000,RENT,0.0,EDUCATION,9000,14.65,1,2020-10-13 18:36:52.352000+00:00,2020-10-13 18:36:52.352000+00:00
+17158,19960225_2287,18651,22,45999,RENT,1.0,VENTURE,7300,7.88,0,2021-05-26 14:57:37.902000+00:00,2021-05-26 14:57:37.902000+00:00
+35661,19711012_6432,68132,40,233000,MORTGAGE,6.0,MEDICAL,15000,11.83,0,2020-10-02 18:55:49.790000+00:00,2020-10-02 18:55:49.790000+00:00
+34244,19540905_7240,35739,29,83500,RENT,1.0,HOMEIMPROVEMENT,15000,13.43,0,2020-10-20 20:23:16.485000+00:00,2020-10-20 20:23:16.485000+00:00
+23297,19870501_9457,2140,25,16200,RENT,2.0,MEDICAL,1500,6.91,0,2021-03-09 09:03:10.535000+00:00,2021-03-09 09:03:10.535000+00:00
+27407,19940915_6221,10590,27,25000,RENT,0.0,PERSONAL,3600,13.16,0,2021-01-15 23:48:44.257000+00:00,2021-01-15 23:48:44.257000+00:00
+36530,19910801_6982,1719,38,48000,MORTGAGE,1.0,PERSONAL,18000,8.49,0,2020-09-21 17:06:18.599000+00:00,2020-09-21 17:06:18.599000+00:00
+32624,19640723_1144,53177,33,36000,RENT,5.0,EDUCATION,12200,9.99,1,2020-11-10 11:56:33.412000+00:00,2020-11-10 11:56:33.412000+00:00
+34601,19591021_9217,79520,35,62475,MORTGAGE,5.0,MEDICAL,9600,14.61,1,2020-10-16 07:10:56.421000+00:00,2020-10-16 07:10:56.421000+00:00
+33226,19990219_5275,99672,27,100000,MORTGAGE,2.0,MEDICAL,2000,8.0,0,2020-11-02 19:47:31.344000+00:00,2020-11-02 19:47:31.344000+00:00
+15847,19491124_9988,62640,24,59400,RENT,8.0,DEBTCONSOLIDATION,6000,7.9,0,2021-06-12 07:59:33.934000+00:00,2021-06-12 07:59:33.934000+00:00
+23067,19650126_1601,46405,26,26400,OWN,8.0,HOMEIMPROVEMENT,8000,5.79,0,2021-03-12 07:24:34.049000+00:00,2021-03-12 07:24:34.049000+00:00
+24124,20010608_6991,37160,24,45000,RENT,8.0,PERSONAL,8000,12.73,0,2021-02-26 20:04:31.116000+00:00,2021-02-26 20:04:31.116000+00:00
+21827,19550615_4211,8837,24,92000,MORTGAGE,7.0,PERSONAL,15000,11.11,0,2021-03-28 02:43:22.561000+00:00,2021-03-28 02:43:22.561000+00:00
+28593,19540626_2082,98043,33,66000,RENT,17.0,EDUCATION,5000,7.29,0,2020-12-31 21:01:02.309000+00:00,2020-12-31 21:01:02.309000+00:00
+30966,20000216_4918,38390,32,80000,RENT,6.0,DEBTCONSOLIDATION,9600,11.14,0,2020-12-01 15:07:17.180000+00:00,2020-12-01 15:07:17.180000+00:00
+24815,19510223_6038,76059,25,51000,MORTGAGE,6.0,EDUCATION,16750,16.4,0,2021-02-18 00:41:59.340000+00:00,2021-02-18 00:41:59.340000+00:00
+22970,19921103_6067,91125,23,114000,OWN,7.0,VENTURE,6000,8.49,0,2021-03-13 13:04:53.618000+00:00,2021-03-13 13:04:53.618000+00:00
+31802,19790604_1488,43532,30,85000,RENT,4.0,HOMEIMPROVEMENT,10000,6.62,0,2020-11-20 23:23:26.667000+00:00,2020-11-20 23:23:26.667000+00:00
+18145,19720929_2910,91916,22,61000,MORTGAGE,6.0,MEDICAL,12000,13.49,0,2021-05-14 01:02:21.256000+00:00,2021-05-14 01:02:21.256000+00:00
+34282,19840325_7595,49544,29,150000,MORTGAGE,0.0,HOMEIMPROVEMENT,10700,14.22,0,2020-10-20 08:45:49.643000+00:00,2020-10-20 08:45:49.643000+00:00
+30480,19470828_5778,49525,30,61000,MORTGAGE,2.0,DEBTCONSOLIDATION,4000,7.9,0,2020-12-07 19:47:16.259000+00:00,2020-12-07 19:47:16.259000+00:00
+31371,19940317_5655,95919,35,71500,MORTGAGE,1.0,HOMEIMPROVEMENT,19200,10.99,0,2020-11-26 11:13:57.949000+00:00,2020-11-26 11:13:57.949000+00:00
+25731,19961013_1223,48021,30,300000,MORTGAGE,13.0,EDUCATION,4000,10.65,0,2021-02-06 08:29:50.213000+00:00,2021-02-06 08:29:50.213000+00:00
+23256,19460121_8415,54428,21,51000,RENT,5.0,VENTURE,1600,15.62,1,2021-03-09 21:35:41.074000+00:00,2021-03-09 21:35:41.074000+00:00
+19150,19950320_6494,4488,22,67440,MORTGAGE,6.0,EDUCATION,5500,10.25,0,2021-05-01 05:36:42.421000+00:00,2021-05-01 05:36:42.421000+00:00
+29992,19671028_9161,65534,27,51600,MORTGAGE,2.0,VENTURE,13650,16.29,1,2020-12-14 01:03:57.802000+00:00,2020-12-14 01:03:57.802000+00:00
+23960,19610817_1645,3826,24,135000,RENT,3.0,EDUCATION,16000,16.69,0,2021-02-28 22:14:33.274000+00:00,2021-02-28 22:14:33.274000+00:00
+35809,20000124_8454,75487,40,73100,RENT,0.0,MEDICAL,20000,14.54,1,2020-09-30 21:39:27.355000+00:00,2020-09-30 21:39:27.355000+00:00
+30164,19940108_8290,97101,32,29000,RENT,0.0,EDUCATION,8000,18.25,1,2020-12-11 20:27:05.783000+00:00,2020-12-11 20:27:05.783000+00:00
+22354,19951008_5271,62051,26,75000,RENT,4.0,EDUCATION,14000,11.11,0,2021-03-21 09:30:52.943000+00:00,2021-03-21 09:30:52.943000+00:00
+15328,19610515_2615,59841,24,48000,MORTGAGE,5.0,HOMEIMPROVEMENT,17000,6.91,0,2021-06-18 22:45:13.690000+00:00,2021-06-18 22:45:13.690000+00:00
+24513,19610625_2694,68745,25,47000,RENT,1.0,VENTURE,8500,8.9,0,2021-02-21 21:04:51.607000+00:00,2021-02-21 21:04:51.607000+00:00
+27164,19811005_2518,38963,31,55000,RENT,3.0,HOMEIMPROVEMENT,12000,13.23,0,2021-01-19 02:08:43.796000+00:00,2021-01-19 02:08:43.796000+00:00
+12773,19680130_2278,72650,24,44000,RENT,3.0,EDUCATION,3500,12.69,0,2021-07-21 12:19:23.164000+00:00,2021-07-21 12:19:23.164000+00:00
+23465,19780829_8413,14757,23,130000,MORTGAGE,7.0,DEBTCONSOLIDATION,6000,5.79,0,2021-03-07 05:39:43.446000+00:00,2021-03-07 05:39:43.446000+00:00
+19971,19450726_6507,13734,24,59600,RENT,8.0,HOMEIMPROVEMENT,10000,5.99,0,2021-04-20 18:28:10.398000+00:00,2021-04-20 18:28:10.398000+00:00
+21876,19460824_5649,21234,23,93000,MORTGAGE,5.0,DEBTCONSOLIDATION,2000,7.9,0,2021-03-27 11:44:02.160000+00:00,2021-03-27 11:44:02.160000+00:00
+35742,19790809_8840,36088,39,55059,RENT,16.0,DEBTCONSOLIDATION,24000,12.53,1,2020-10-01 18:09:09.944000+00:00,2020-10-01 18:09:09.944000+00:00
+32892,19920219_5973,78739,27,33600,OWN,1.0,PERSONAL,21250,8.88,0,2020-11-07 01:57:43.056000+00:00,2020-11-07 01:57:43.056000+00:00
+34015,19930830_8701,24431,34,130000,MORTGAGE,5.0,MEDICAL,6450,13.23,0,2020-10-23 18:26:18.767000+00:00,2020-10-23 18:26:18.767000+00:00
+27994,19721118_8989,31605,31,25000,RENT,0.0,MEDICAL,4800,11.54,0,2021-01-08 12:15:00.679000+00:00,2021-01-08 12:15:00.679000+00:00
+18431,19880511_4031,27807,22,32000,RENT,3.0,EDUCATION,8400,11.49,0,2021-05-10 09:33:08.712000+00:00,2021-05-10 09:33:08.712000+00:00
+37210,19950611_9270,47438,37,70000,MORTGAGE,7.0,MEDICAL,25000,9.76,0,2020-09-13 01:05:40.383000+00:00,2020-09-13 01:05:40.383000+00:00
+21450,19750621_1855,32328,24,87000,MORTGAGE,7.0,HOMEIMPROVEMENT,25000,10.99,0,2021-04-01 22:02:47.278000+00:00,2021-04-01 22:02:47.278000+00:00
+37151,19950608_9060,16373,37,32000,RENT,4.0,DEBTCONSOLIDATION,10000,13.79,1,2020-09-13 19:08:33.110000+00:00,2020-09-13 19:08:33.110000+00:00
+21764,19461005_6797,66781,25,46200,RENT,1.0,DEBTCONSOLIDATION,2700,15.96,1,2021-03-28 21:59:40.219000+00:00,2021-03-28 21:59:40.219000+00:00
+30456,19711116_1945,76034,34,60100,MORTGAGE,6.0,EDUCATION,1500,6.03,0,2020-12-08 03:07:45.843000+00:00,2020-12-08 03:07:45.843000+00:00
+33358,19770804_8916,37130,33,90000,RENT,8.0,DEBTCONSOLIDATION,14500,14.65,0,2020-11-01 03:24:48.631000+00:00,2020-11-01 03:24:48.631000+00:00
+30863,19480222_5478,72956,29,54996,RENT,8.0,MEDICAL,9300,9.32,0,2020-12-02 22:37:44.146000+00:00,2020-12-02 22:37:44.146000+00:00
+11796,19731009_8512,47038,25,44400,RENT,6.0,EDUCATION,2350,13.98,0,2021-08-02 23:11:07.484000+00:00,2021-08-02 23:11:07.484000+00:00
+10237,19500102_1085,46366,25,66300,RENT,3.0,MEDICAL,25000,17.93,1,2021-08-22 20:04:49.218000+00:00,2021-08-22 20:04:49.218000+00:00
+33347,19910104_4528,21403,34,105000,MORTGAGE,2.0,MEDICAL,10000,7.51,0,2020-11-01 06:46:42.191000+00:00,2020-11-01 06:46:42.191000+00:00
+21295,19490717_5702,58344,23,85000,MORTGAGE,3.0,EDUCATION,4000,6.76,0,2021-04-03 21:27:38.342000+00:00,2021-04-03 21:27:38.342000+00:00
+11686,19550102_1482,81323,26,62400,RENT,3.0,PERSONAL,2000,12.53,0,2021-08-04 08:50:03.078000+00:00,2021-08-04 08:50:03.078000+00:00
+34453,19660204_1508,46240,29,300000,RENT,1.0,HOMEIMPROVEMENT,16000,11.58,0,2020-10-18 04:27:18.857000+00:00,2020-10-18 04:27:18.857000+00:00
+35166,19490220_6803,7432,27,66240,RENT,7.0,MEDICAL,5000,5.79,0,2020-10-09 02:20:59.962000+00:00,2020-10-09 02:20:59.962000+00:00
+19731,19860521_3415,29729,26,72000,MORTGAGE,10.0,MEDICAL,17000,10.65,0,2021-04-23 19:53:06.239000+00:00,2021-04-23 19:53:06.239000+00:00
+32628,19470903_9768,60044,28,28800,RENT,1.0,VENTURE,12250,10.59,1,2020-11-10 10:43:08.481000+00:00,2020-11-10 10:43:08.481000+00:00
+25168,19820206_8182,21919,23,32640,RENT,5.0,VENTURE,4800,11.99,0,2021-02-13 12:43:04.207000+00:00,2021-02-13 12:43:04.207000+00:00
+32265,19710123_6186,31778,28,83135,MORTGAGE,3.0,EDUCATION,8000,13.99,0,2020-11-15 01:45:35.941000+00:00,2020-11-15 01:45:35.941000+00:00
+12135,19510118_6401,77624,23,32004,OWN,0.0,EDUCATION,9250,17.19,0,2021-07-29 15:29:09.608000+00:00,2021-07-29 15:29:09.608000+00:00
+13792,19450220_8508,45810,22,40000,MORTGAGE,6.0,DEBTCONSOLIDATION,7000,9.32,0,2021-07-08 12:36:47.073000+00:00,2021-07-08 12:36:47.073000+00:00
+25405,19740507_4208,37857,21,65004,MORTGAGE,5.0,EDUCATION,5000,9.99,0,2021-02-10 12:13:12.064000+00:00,2021-02-10 12:13:12.064000+00:00
+36874,19480326_5471,38625,42,30000,RENT,5.0,VENTURE,8000,14.96,0,2020-09-17 07:52:34.560000+00:00,2020-09-17 07:52:34.560000+00:00
+12131,19480312_9985,73143,22,29376,RENT,1.0,EDUCATION,2800,7.43,0,2021-07-29 16:42:34.539000+00:00,2021-07-29 16:42:34.539000+00:00
+20570,19730416_9675,80904,26,61754,MORTGAGE,7.0,MEDICAL,10000,14.83,1,2021-04-13 03:14:12.028000+00:00,2021-04-13 03:14:12.028000+00:00
+10780,19690621_4640,38732,23,39600,RENT,5.0,EDUCATION,19000,10.62,1,2021-08-15 21:58:39.878000+00:00,2021-08-15 21:58:39.878000+00:00
+18239,19910430_3275,63301,26,62000,MORTGAGE,1.0,DEBTCONSOLIDATION,8400,6.62,0,2021-05-12 20:17:05.385000+00:00,2021-05-12 20:17:05.385000+00:00
+25547,19841109_1212,22842,21,47500,RENT,2.0,EDUCATION,7500,10.99,0,2021-02-08 16:46:57.025000+00:00,2021-02-08 16:46:57.025000+00:00
+11912,19590125_6131,51240,26,77508,RENT,10.0,VENTURE,13500,12.87,0,2021-08-01 11:42:04.494000+00:00,2021-08-01 11:42:04.494000+00:00
+35869,19881208_1257,70452,36,25000,MORTGAGE,0.0,HOMEIMPROVEMENT,3000,8.7,0,2020-09-30 03:18:13.394000+00:00,2020-09-30 03:18:13.394000+00:00
+12643,19870606_1700,74652,22,75000,RENT,6.0,EDUCATION,3200,10.28,0,2021-07-23 04:05:23.412000+00:00,2021-07-23 04:05:23.412000+00:00
+10183,19661226_9065,29689,25,72612,RENT,9.0,MEDICAL,25000,13.11,1,2021-08-23 12:35:55.782000+00:00,2021-08-23 12:35:55.782000+00:00
+30399,19700406_4808,40941,29,60000,MORTGAGE,8.0,HOMEIMPROVEMENT,16000,14.96,0,2020-12-08 20:33:56.105000+00:00,2020-12-08 20:33:56.105000+00:00
+26719,19691014_7980,67457,27,51000,RENT,1.0,HOMEIMPROVEMENT,2000,7.74,0,2021-01-24 18:16:12.334000+00:00,2021-01-24 18:16:12.334000+00:00
+37241,19970209_1933,2667,50,72000,MORTGAGE,13.0,MEDICAL,8000,11.49,0,2020-09-12 15:36:42.170000+00:00,2020-09-12 15:36:42.170000+00:00
+27227,19710212_2724,17257,33,75500,RENT,5.0,VENTURE,3000,12.42,0,2021-01-18 06:52:26.137000+00:00,2021-01-18 06:52:26.137000+00:00
+36117,19510809_4123,13411,41,39000,RENT,2.0,PERSONAL,3500,13.11,0,2020-09-26 23:26:27.692000+00:00,2020-09-26 23:26:27.692000+00:00
+13867,19780230_3383,90806,26,45000,RENT,1.0,MEDICAL,4800,5.42,0,2021-07-07 13:40:14.622000+00:00,2021-07-07 13:40:14.622000+00:00
+24296,19600201_4209,27609,22,27996,RENT,0.0,EDUCATION,2200,15.95,1,2021-02-24 15:27:39.096000+00:00,2021-02-24 15:27:39.096000+00:00
+34822,19860304_5438,63332,29,64000,RENT,3.0,DEBTCONSOLIDATION,15000,15.27,1,2020-10-13 11:34:44.001000+00:00,2020-10-13 11:34:44.001000+00:00
+17559,19800308_6778,98338,24,27000,RENT,3.0,VENTURE,8000,8.9,0,2021-05-21 12:17:43.601000+00:00,2021-05-21 12:17:43.601000+00:00
+15749,19520113_6906,71105,25,50000,RENT,1.0,MEDICAL,6000,14.79,0,2021-06-13 13:58:14.735000+00:00,2021-06-13 13:58:14.735000+00:00
+37361,19450317_3985,27619,38,77250,OWN,4.0,VENTURE,3000,8.59,0,2020-09-11 02:54:14.249000+00:00,2020-09-11 02:54:14.249000+00:00
+32135,19480621_2491,13153,28,34000,RENT,0.0,EDUCATION,12000,14.61,1,2020-11-16 17:31:36.188000+00:00,2020-11-16 17:31:36.188000+00:00
+30159,19700506_2597,18346,28,33000,RENT,12.0,DEBTCONSOLIDATION,8000,14.65,1,2020-12-11 21:58:51.946000+00:00,2020-12-11 21:58:51.946000+00:00
+24243,19520315_8466,44646,22,30000,RENT,4.0,EDUCATION,10000,11.86,1,2021-02-25 07:40:24.428000+00:00,2021-02-25 07:40:24.428000+00:00
+10708,19670704_6846,44830,22,22000,MORTGAGE,6.0,PERSONAL,7200,7.14,0,2021-08-16 20:00:08.630000+00:00,2021-08-16 20:00:08.630000+00:00
+34025,19920522_8812,60434,33,118000,MORTGAGE,5.0,DEBTCONSOLIDATION,25000,14.96,1,2020-10-23 15:22:46.440000+00:00,2020-10-23 15:22:46.440000+00:00
+38038,19890221_1496,27878,42,300000,RENT,26.0,MEDICAL,16000,9.63,0,2020-09-02 11:48:39.731000+00:00,2020-09-02 11:48:39.731000+00:00
+12666,19750124_1330,61028,24,35004,MORTGAGE,5.0,EDUCATION,7000,5.42,0,2021-07-22 21:03:15.060000+00:00,2021-07-22 21:03:15.060000+00:00
+37283,19850806_8554,43525,46,74500,MORTGAGE,0.0,VENTURE,10500,12.09,0,2020-09-12 02:45:50.398000+00:00,2020-09-12 02:45:50.398000+00:00
+24414,19711002_7371,84765,24,51000,MORTGAGE,4.0,EDUCATION,8500,8.94,0,2021-02-23 03:21:53.641000+00:00,2021-02-23 03:21:53.641000+00:00
+20353,19870730_5089,68002,26,59497,OWN,2.0,MEDICAL,7000,17.88,1,2021-04-15 21:36:59.518000+00:00,2021-04-15 21:36:59.518000+00:00
+38099,19760512_5496,75931,39,84000,OWN,12.0,VENTURE,9875,7.51,0,2020-09-01 17:09:04.538000+00:00,2020-09-01 17:09:04.538000+00:00
+28161,19521203_3450,97388,27,28968,RENT,1.0,PERSONAL,5000,13.16,0,2021-01-06 09:09:54.823000+00:00,2021-01-06 09:09:54.823000+00:00
+37342,19780913_4168,45246,38,76000,MORTGAGE,4.0,PERSONAL,9000,5.99,0,2020-09-11 08:42:57.670000+00:00,2020-09-11 08:42:57.670000+00:00
+26467,19700105_2778,76503,28,55000,RENT,5.0,PERSONAL,16000,13.98,0,2021-01-27 23:21:22.967000+00:00,2021-01-27 23:21:22.967000+00:00
+22169,19971012_1674,61434,24,42996,RENT,2.0,MEDICAL,14000,7.51,1,2021-03-23 18:06:20.987000+00:00,2021-03-23 18:06:20.987000+00:00
+25340,19780612_9832,74960,26,45000,MORTGAGE,6.0,VENTURE,5000,6.99,0,2021-02-11 08:06:12.188000+00:00,2021-02-11 08:06:12.188000+00:00
+32989,19570829_3300,12542,29,84000,MORTGAGE,6.0,DEBTCONSOLIDATION,8000,14.96,1,2020-11-05 20:17:23.487000+00:00,2020-11-05 20:17:23.487000+00:00
+35091,19460914_1042,98597,30,66888,MORTGAGE,6.0,HOMEIMPROVEMENT,15000,12.23,0,2020-10-10 01:17:32.412000+00:00,2020-10-10 01:17:32.412000+00:00
+33284,19821216_3789,24550,29,102000,OWN,10.0,MEDICAL,18000,9.33,0,2020-11-02 02:02:59.849000+00:00,2020-11-02 02:02:59.849000+00:00
+32079,20000126_1542,55005,27,35000,RENT,0.0,EDUCATION,12000,7.29,1,2020-11-17 10:39:25.218000+00:00,2020-11-17 10:39:25.218000+00:00
+36910,19880811_5009,15825,42,60000,MORTGAGE,4.0,HOMEIMPROVEMENT,8850,10.37,0,2020-09-16 20:51:50.184000+00:00,2020-09-16 20:51:50.184000+00:00
+34569,19870430_8371,47995,28,72000,MORTGAGE,9.0,HOMEIMPROVEMENT,18000,11.89,0,2020-10-16 16:58:15.867000+00:00,2020-10-16 16:58:15.867000+00:00
+27921,19890823_9776,16262,35,39000,RENT,8.0,DEBTCONSOLIDATION,4500,6.62,0,2021-01-09 10:34:50.664000+00:00,2021-01-09 10:34:50.664000+00:00
+24659,19601129_4223,16410,26,45000,MORTGAGE,9.0,PERSONAL,10000,16.89,0,2021-02-20 00:25:11.637000+00:00,2021-02-20 00:25:11.637000+00:00
+26328,19750409_4895,68638,27,64000,RENT,3.0,MEDICAL,18000,11.11,0,2021-01-29 17:52:34.309000+00:00,2021-01-29 17:52:34.309000+00:00
+37715,19941224_9758,35611,39,100000,MORTGAGE,0.0,VENTURE,6000,14.11,0,2020-09-06 14:36:57.884000+00:00,2020-09-06 14:36:57.884000+00:00
+24614,19790325_8007,5037,26,48000,RENT,5.0,DEBTCONSOLIDATION,25000,18.43,1,2021-02-20 14:11:07.107000+00:00,2021-02-20 14:11:07.107000+00:00
+18299,19970303_2510,71031,24,85000,RENT,0.0,HOMEIMPROVEMENT,8000,13.06,0,2021-05-12 01:55:51.424000+00:00,2021-05-12 01:55:51.424000+00:00
+32741,19550317_6465,59803,30,91000,MORTGAGE,7.0,EDUCATION,33950,7.9,0,2020-11-09 00:09:09.189000+00:00,2020-11-09 00:09:09.189000+00:00
+36674,19850917_3339,37659,36,51996,OWN,7.0,HOMEIMPROVEMENT,8500,11.49,0,2020-09-19 21:03:21.094000+00:00,2020-09-19 21:03:21.094000+00:00
+16932,19590601_1477,98036,23,55000,MORTGAGE,2.0,EDUCATION,5000,11.11,0,2021-05-29 12:05:36.486000+00:00,2021-05-29 12:05:36.486000+00:00
+32996,19521009_8624,3086,31,84000,MORTGAGE,3.0,HOMEIMPROVEMENT,25000,17.34,1,2020-11-05 18:08:54.858000+00:00,2020-11-05 18:08:54.858000+00:00
+34856,19490916_9161,92654,30,51000,RENT,14.0,PERSONAL,4800,12.69,0,2020-10-13 01:10:42.090000+00:00,2020-10-13 01:10:42.090000+00:00
+27758,19550522_5423,51453,31,41000,RENT,7.0,VENTURE,4000,10.99,0,2021-01-11 12:26:31.589000+00:00,2021-01-11 12:26:31.589000+00:00
+37358,19890126_7407,31210,48,55000,RENT,9.0,MEDICAL,10200,13.92,0,2020-09-11 03:49:17.947000+00:00,2020-09-11 03:49:17.947000+00:00
+32584,19541129_7340,29704,29,89849,MORTGAGE,13.0,EDUCATION,6000,13.98,0,2020-11-11 00:10:42.719000+00:00,2020-11-11 00:10:42.719000+00:00
+34516,19640427_3737,80654,33,300000,OWN,8.0,MEDICAL,25000,14.42,0,2020-10-17 09:11:01.198000+00:00,2020-10-17 09:11:01.198000+00:00
+25248,19821024_5625,20732,23,157500,RENT,1.0,EDUCATION,10000,12.69,0,2021-02-12 12:14:45.593000+00:00,2021-02-12 12:14:45.593000+00:00
+34463,19991223_4134,93664,34,225000,MORTGAGE,4.0,MEDICAL,3000,13.35,0,2020-10-18 01:23:46.530000+00:00,2020-10-18 01:23:46.530000+00:00
+20559,20010604_7016,98651,22,78413,MORTGAGE,6.0,EDUCATION,17500,10.65,0,2021-04-13 06:36:05.588000+00:00,2021-04-13 06:36:05.588000+00:00
+17660,19970822_5090,38915,22,30000,RENT,2.0,DEBTCONSOLIDATION,8000,11.11,1,2021-05-20 05:23:59.101000+00:00,2021-05-20 05:23:59.101000+00:00
+33236,20001204_1738,25570,29,100000,MORTGAGE,7.0,MEDICAL,15000,12.04,0,2020-11-02 16:43:59.017000+00:00,2020-11-02 16:43:59.017000+00:00 +16605,19880709_9925,10011,26,31836,RENT,1.0,VENTURE,6400,14.35,0,2021-06-02 16:07:19.569000+00:00,2021-06-02 16:07:19.569000+00:00 +28400,20000517_4791,84317,27,88000,RENT,11.0,DEBTCONSOLIDATION,9000,6.91,0,2021-01-03 08:03:20.214000+00:00,2021-01-03 08:03:20.214000+00:00 +27949,19500708_3201,75713,29,70000,RENT,2.0,PERSONAL,4500,5.42,0,2021-01-09 02:00:56.149000+00:00,2021-01-09 02:00:56.149000+00:00 +29042,19451210_3991,63436,32,36000,RENT,4.0,MEDICAL,6000,16.07,1,2020-12-26 03:40:08.839000+00:00,2020-12-26 03:40:08.839000+00:00 +21168,19901115_3595,15834,22,65400,RENT,1.0,EDUCATION,12000,13.06,0,2021-04-05 12:18:34.891000+00:00,2021-04-05 12:18:34.891000+00:00 +11015,19980918_9319,72632,24,72000,RENT,0.0,DEBTCONSOLIDATION,16875,6.54,0,2021-08-12 22:05:30.200000+00:00,2021-08-12 22:05:30.200000+00:00 +23803,20000219_1345,56501,25,39000,RENT,9.0,VENTURE,15200,16.0,1,2021-03-02 22:16:06.803000+00:00,2021-03-02 22:16:06.803000+00:00 +31056,19600625_4745,69044,29,30000,RENT,3.0,EDUCATION,10000,16.82,1,2020-11-30 11:35:26.240000+00:00,2020-11-30 11:35:26.240000+00:00 +10475,19801206_7479,61238,25,51060,RENT,9.0,VENTURE,21250,6.91,1,2021-08-19 19:16:35.842000+00:00,2021-08-19 19:16:35.842000+00:00 +23871,19501008_3936,48628,22,165500,MORTGAGE,6.0,EDUCATION,12000,12.99,0,2021-03-02 01:28:02.981000+00:00,2021-03-02 01:28:02.981000+00:00 +24420,19471228_5728,66451,22,47800,MORTGAGE,5.0,PERSONAL,1500,13.43,0,2021-02-23 01:31:46.245000+00:00,2021-02-23 01:31:46.245000+00:00 +16436,19800110_7264,14814,26,52700,MORTGAGE,7.0,PERSONAL,4000,7.4,0,2021-06-04 19:49:07.890000+00:00,2021-06-04 19:49:07.890000+00:00 +35726,19560411_8302,33849,36,127000,RENT,20.0,DEBTCONSOLIDATION,25000,14.59,1,2020-10-01 23:02:49.666000+00:00,2020-10-01 23:02:49.666000+00:00 +13363,19780309_8183,87199,22,30000,RENT,1.0,EDUCATION,4000,14.96,1,2021-07-13 23:50:35.888000+00:00,2021-07-13 23:50:35.888000+00:00 +17794,20000317_1989,13413,24,60000,MORTGAGE,8.0,MEDICAL,13000,7.29,0,2021-05-18 12:24:33.923000+00:00,2021-05-18 12:24:33.923000+00:00 +26732,19790208_2696,66866,27,69000,RENT,1.0,MEDICAL,2000,6.99,0,2021-01-24 14:17:36.310000+00:00,2021-01-24 14:17:36.310000+00:00 +25641,19520304_1262,33917,24,54000,MORTGAGE,8.0,PERSONAL,4000,9.99,0,2021-02-07 12:01:41.154000+00:00,2021-02-07 12:01:41.154000+00:00 +37947,19650829_5030,91784,43,142500,MORTGAGE,7.0,MEDICAL,20000,11.49,0,2020-09-03 15:38:51.904000+00:00,2020-09-03 15:38:51.904000+00:00 +13597,19510311_7617,14085,23,40000,MORTGAGE,3.0,MEDICAL,5000,5.42,0,2021-07-11 00:15:47.443000+00:00,2021-07-11 00:15:47.443000+00:00 +15947,19640125_6629,93422,24,50000,MORTGAGE,8.0,EDUCATION,3000,13.47,0,2021-06-11 01:24:10.666000+00:00,2021-06-11 01:24:10.666000+00:00 +17629,19800220_8140,97119,22,34000,RENT,2.0,DEBTCONSOLIDATION,8000,9.91,0,2021-05-20 14:52:57.314000+00:00,2021-05-20 14:52:57.314000+00:00 +23726,19571214_6062,92571,23,85000,RENT,7.0,PERSONAL,15000,10.99,0,2021-03-03 21:49:21.719000+00:00,2021-03-03 21:49:21.719000+00:00 +29649,19601016_7021,95666,34,54072,MORTGAGE,12.0,PERSONAL,7000,11.89,0,2020-12-18 09:59:20.608000+00:00,2020-12-18 09:59:20.608000+00:00 +28911,19650107_9702,54301,31,48000,OWN,0.0,HOMEIMPROVEMENT,20000,10.62,0,2020-12-27 19:44:30.319000+00:00,2020-12-27 19:44:30.319000+00:00 +17821,19640221_8062,26377,22,60000,MORTGAGE,6.0,VENTURE,7800,7.29,0,2021-05-18 04:09:00.641000+00:00,2021-05-18 04:09:00.641000+00:00 
+26606,19541019_6076,85078,27,28500,MORTGAGE,3.0,VENTURE,1600,13.49,1,2021-01-26 04:50:11.626000+00:00,2021-01-26 04:50:11.626000+00:00 +14745,19670129_8967,84070,26,45000,MORTGAGE,3.0,EDUCATION,4000,14.35,0,2021-06-26 09:05:32.337000+00:00,2021-06-26 09:05:32.337000+00:00 +30348,20010118_5122,81526,27,60000,MORTGAGE,3.0,PERSONAL,5000,16.45,0,2020-12-09 12:09:58.971000+00:00,2020-12-09 12:09:58.971000+00:00 +32477,19980613_8887,31801,27,86004,MORTGAGE,8.0,HOMEIMPROVEMENT,16000,14.42,0,2020-11-12 08:54:34.615000+00:00,2020-11-12 08:54:34.615000+00:00 +32970,19950214_3891,37397,33,96000,MORTGAGE,0.0,VENTURE,8000,13.72,0,2020-11-06 02:06:06.908000+00:00,2020-11-06 02:06:06.908000+00:00 +24413,19480923_4101,2660,24,72000,RENT,4.0,EDUCATION,15000,10.99,0,2021-02-23 03:40:14.874000+00:00,2021-02-23 03:40:14.874000+00:00 +18816,19760804_7354,72336,22,31000,RENT,7.0,MEDICAL,9250,14.11,1,2021-05-05 11:46:54.133000+00:00,2021-05-05 11:46:54.133000+00:00 +19975,19850618_4159,68752,25,60000,RENT,9.0,VENTURE,10000,9.91,0,2021-04-20 17:14:45.468000+00:00,2021-04-20 17:14:45.468000+00:00 +30800,19860425_8873,78539,34,47700,RENT,2.0,DEBTCONSOLIDATION,9000,7.14,0,2020-12-03 17:54:01.804000+00:00,2020-12-03 17:54:01.804000+00:00 +12493,19671228_8355,32305,22,58992,RENT,0.0,MEDICAL,3000,12.84,1,2021-07-25 01:58:28.312000+00:00,2021-07-25 01:58:28.312000+00:00 +35307,19790506_1791,43521,27,19200,MORTGAGE,3.0,DEBTCONSOLIDATION,1400,16.77,1,2020-10-07 07:13:06.156000+00:00,2020-10-07 07:13:06.156000+00:00 +26818,19651116_6841,27102,30,29004,RENT,13.0,MEDICAL,2400,17.93,1,2021-01-23 11:59:10.300000+00:00,2021-01-23 11:59:10.300000+00:00 +35188,19580121_3344,89701,28,28320,RENT,4.0,VENTURE,8700,14.61,1,2020-10-08 19:37:12.843000+00:00,2020-10-08 19:37:12.843000+00:00 +33608,20010526_6233,54970,33,115000,MORTGAGE,6.0,MEDICAL,10500,13.49,0,2020-10-28 22:56:20.464000+00:00,2020-10-28 22:56:20.464000+00:00 +11733,19500219_4645,14048,26,110000,RENT,0.0,DEBTCONSOLIDATION,2100,7.12,0,2021-08-03 18:27:25.142000+00:00,2021-08-03 18:27:25.142000+00:00 +36571,19900328_1265,92110,36,38160,RENT,3.0,EDUCATION,6000,12.61,0,2020-09-21 04:33:48.059000+00:00,2020-09-21 04:33:48.059000+00:00 +25677,19461206_7518,53556,25,25716,RENT,1.0,MEDICAL,9250,6.54,1,2021-02-07 01:00:56.777000+00:00,2021-02-07 01:00:56.777000+00:00 +33570,19580705_1279,71327,29,42000,RENT,5.0,PERSONAL,15000,10.99,1,2020-10-29 10:33:47.305000+00:00,2020-10-29 10:33:47.305000+00:00 +15006,19560427_7774,54848,23,35000,RENT,7.0,PERSONAL,5400,10.59,0,2021-06-23 01:15:10.610000+00:00,2021-06-23 01:15:10.610000+00:00 +15928,19970908_9926,23703,25,57500,RENT,4.0,VENTURE,6000,11.28,1,2021-06-11 07:12:54.087000+00:00,2021-06-11 07:12:54.087000+00:00 +20564,19540129_8592,1376,23,78500,MORTGAGE,7.0,DEBTCONSOLIDATION,4800,7.14,0,2021-04-13 05:04:19.424000+00:00,2021-04-13 05:04:19.424000+00:00 +34104,19541220_1198,18518,34,135600,MORTGAGE,10.0,EDUCATION,1000,5.42,0,2020-10-22 15:12:49.059000+00:00,2020-10-22 15:12:49.059000+00:00 +31609,19650806_1345,78940,28,75000,MORTGAGE,3.0,HOMEIMPROVEMENT,15000,8.9,0,2020-11-23 10:25:44.573000+00:00,2020-11-23 10:25:44.573000+00:00 +15993,19590913_4204,62360,22,50000,OWN,6.0,MEDICAL,15000,8.63,0,2021-06-10 11:19:53.964000+00:00,2021-06-10 11:19:53.964000+00:00 +25552,19801210_5822,1072,22,33685,MORTGAGE,6.0,DEBTCONSOLIDATION,5000,13.85,0,2021-02-08 15:15:10.861000+00:00,2021-02-08 15:15:10.861000+00:00 +37796,19750118_4263,46182,38,112224,MORTGAGE,5.0,EDUCATION,25000,6.99,0,2020-09-05 13:50:18.037000+00:00,2020-09-05 
13:50:18.037000+00:00 +29683,19900620_6973,71073,29,39996,RENT,3.0,PERSONAL,7000,9.32,0,2020-12-17 23:35:18.697000+00:00,2020-12-17 23:35:18.697000+00:00 +24369,19530101_9929,54888,20,188004,MORTGAGE,4.0,PERSONAL,2000,8.59,0,2021-02-23 17:07:49.111000+00:00,2021-02-23 17:07:49.111000+00:00 +34021,19520207_6672,61025,35,130000,MORTGAGE,3.0,DEBTCONSOLIDATION,9000,12.73,0,2020-10-23 16:36:11.370000+00:00,2020-10-23 16:36:11.370000+00:00 +37136,19860716_9552,62258,39,68000,MORTGAGE,1.0,PERSONAL,16000,13.49,0,2020-09-13 23:43:51.600000+00:00,2020-09-13 23:43:51.600000+00:00 +11943,19930107_1287,56368,23,75000,RENT,3.0,VENTURE,13200,6.17,0,2021-08-01 02:13:06.281000+00:00,2021-08-01 02:13:06.281000+00:00 +20871,19900406_8998,75480,24,80004,MORTGAGE,8.0,HOMEIMPROVEMENT,6000,6.17,0,2021-04-09 07:09:40.994000+00:00,2021-04-09 07:09:40.994000+00:00 +14409,19671005_8397,30224,24,48000,RENT,3.0,EDUCATION,5000,11.89,0,2021-06-30 15:52:26.515000+00:00,2021-06-30 15:52:26.515000+00:00 +18215,19970502_7007,29071,22,61500,MORTGAGE,5.0,VENTURE,30000,6.62,0,2021-05-13 03:37:34.969000+00:00,2021-05-13 03:37:34.969000+00:00 +10956,19780901_1846,80918,25,44000,RENT,6.0,EDUCATION,17500,12.53,1,2021-08-13 16:08:22.928000+00:00,2021-08-13 16:08:22.928000+00:00 +32282,19780304_8864,74431,27,26208,RENT,0.0,EDUCATION,3000,13.98,0,2020-11-14 20:33:34.985000+00:00,2020-11-14 20:33:34.985000+00:00 +34349,19541007_8325,84526,28,158367,MORTGAGE,5.0,MEDICAL,12000,6.62,0,2020-10-19 12:16:07.054000+00:00,2020-10-19 12:16:07.054000+00:00 +13782,19870617_6419,27573,22,40000,OWN,0.0,EDUCATION,6000,11.48,0,2021-07-08 15:40:19.399000+00:00,2021-07-08 15:40:19.399000+00:00 +25685,19920326_2575,97624,32,1200000,MORTGAGE,1.0,VENTURE,12000,7.51,0,2021-02-06 22:34:06.916000+00:00,2021-02-06 22:34:06.916000+00:00 +33738,19621202_3047,94114,28,50000,RENT,1.0,EDUCATION,15000,10.28,0,2020-10-27 07:10:20.216000+00:00,2020-10-27 07:10:20.216000+00:00 +19983,19491225_7277,27973,23,74000,MORTGAGE,4.0,VENTURE,20000,11.48,0,2021-04-20 14:47:55.606000+00:00,2021-04-20 14:47:55.606000+00:00 +19107,19920613_7381,97411,22,23000,RENT,6.0,MEDICAL,10000,9.88,1,2021-05-01 18:45:55.426000+00:00,2021-05-01 18:45:55.426000+00:00 +37328,19560903_8682,40066,43,80000,RENT,5.0,HOMEIMPROVEMENT,10000,11.49,0,2020-09-11 12:59:54.927000+00:00,2020-09-11 12:59:54.927000+00:00 +33714,19460507_1447,47640,29,120000,MORTGAGE,7.0,DEBTCONSOLIDATION,20000,12.42,0,2020-10-27 14:30:49.800000+00:00,2020-10-27 14:30:49.800000+00:00 +18772,19840716_9517,5158,26,29000,RENT,10.0,DEBTCONSOLIDATION,9200,7.49,1,2021-05-06 01:14:28.371000+00:00,2021-05-06 01:14:28.371000+00:00 +29229,20000619_8396,12992,30,50000,MORTGAGE,3.0,HOMEIMPROVEMENT,5000,10.95,0,2020-12-23 18:27:58.330000+00:00,2020-12-23 18:27:58.330000+00:00 +16817,19880401_9015,18929,23,54996,MORTGAGE,7.0,PERSONAL,8000,14.35,0,2021-05-30 23:16:18.243000+00:00,2021-05-30 23:16:18.243000+00:00 +19123,19670902_1880,77903,23,24960,RENT,0.0,PERSONAL,10000,9.64,1,2021-05-01 13:52:15.703000+00:00,2021-05-01 13:52:15.703000+00:00 +34113,19760129_4733,28741,28,124886,MORTGAGE,12.0,HOMEIMPROVEMENT,5000,11.99,1,2020-10-22 12:27:37.965000+00:00,2020-10-22 12:27:37.965000+00:00 +17446,19750123_7667,24343,23,58000,OWN,4.0,MEDICAL,6400,13.48,0,2021-05-22 22:51:42.893000+00:00,2021-05-22 22:51:42.893000+00:00 +13332,19760912_2249,48120,23,38400,OWN,7.0,PERSONAL,6000,13.24,0,2021-07-14 09:19:34.101000+00:00,2021-07-14 09:19:34.101000+00:00 +36020,19980718_7492,64502,47,72000,RENT,5.0,VENTURE,2500,15.05,0,2020-09-28 
05:06:47.261000+00:00,2020-09-28 05:06:47.261000+00:00 +29441,19730321_1883,29822,27,52000,MORTGAGE,6.0,PERSONAL,11500,7.14,0,2020-12-21 01:36:57.004000+00:00,2020-12-21 01:36:57.004000+00:00 +31847,19851103_8214,78411,30,77000,MORTGAGE,4.0,DEBTCONSOLIDATION,8000,7.14,0,2020-11-20 09:37:31.197000+00:00,2020-11-20 09:37:31.197000+00:00 +25936,19721225_5495,52229,29,92200,RENT,7.0,VENTURE,25000,14.96,0,2021-02-03 17:47:17.516000+00:00,2021-02-03 17:47:17.516000+00:00 +14161,19591204_6177,48317,22,32968,RENT,2.0,DEBTCONSOLIDATION,5000,14.61,1,2021-07-03 19:44:12.217000+00:00,2021-07-03 19:44:12.217000+00:00 +25012,19681114_6916,38039,23,30000,RENT,8.0,EDUCATION,10625,6.92,1,2021-02-15 12:26:16.504000+00:00,2021-02-15 12:26:16.504000+00:00 +29530,19630417_2688,33567,29,53000,MORTGAGE,13.0,DEBTCONSOLIDATION,10750,13.11,0,2020-12-19 22:23:27.296000+00:00,2020-12-19 22:23:27.296000+00:00 +10363,20010523_1924,17225,23,47004,RENT,1.0,VENTURE,24000,11.58,1,2021-08-21 05:32:13.901000+00:00,2021-08-21 05:32:13.901000+00:00 +17991,19520918_9666,15539,25,21000,RENT,0.0,DEBTCONSOLIDATION,5175,12.53,1,2021-05-16 00:08:51.087000+00:00,2021-05-16 00:08:51.087000+00:00 +12178,19870416_6117,64720,26,32450,MORTGAGE,5.0,MEDICAL,3250,6.99,0,2021-07-29 02:19:56.604000+00:00,2021-07-29 02:19:56.604000+00:00 +34611,19520313_7280,61410,29,117000,RENT,6.0,EDUCATION,21000,10.65,0,2020-10-16 04:07:24.095000+00:00,2020-10-16 04:07:24.095000+00:00 +11426,19461118_1378,32446,26,51996,RENT,10.0,VENTURE,1500,13.85,0,2021-08-07 16:22:03.572000+00:00,2021-08-07 16:22:03.572000+00:00 +18107,19710213_4280,77502,25,24000,RENT,3.0,EDUCATION,5000,6.92,0,2021-05-14 12:39:48.097000+00:00,2021-05-14 12:39:48.097000+00:00 +30949,19720715_7529,36541,34,60000,MORTGAGE,18.0,EDUCATION,3000,11.99,1,2020-12-01 20:19:18.136000+00:00,2020-12-01 20:19:18.136000+00:00 +38395,19550816_5423,93612,53,304800,RENT,6.0,VENTURE,5000,8.88,0,2020-08-28 22:36:19.667000+00:00,2020-08-28 22:36:19.667000+00:00 +25102,19831011_8872,49456,25,51492,MORTGAGE,2.0,EDUCATION,13800,10.38,0,2021-02-14 08:54:25.563000+00:00,2021-02-14 08:54:25.563000+00:00 +37402,19521016_7977,10933,36,80000,MORTGAGE,3.0,VENTURE,12025,10.65,0,2020-09-10 14:21:43.710000+00:00,2020-09-10 14:21:43.710000+00:00 +38484,19861012_5786,28751,60,49008,OWN,5.0,PERSONAL,12000,13.79,0,2020-08-27 19:22:49.960000+00:00,2020-08-27 19:22:49.960000+00:00 +32963,19980608_1974,60188,29,71400,MORTGAGE,1.0,MEDICAL,8000,8.49,1,2020-11-06 04:14:35.536000+00:00,2020-11-06 04:14:35.536000+00:00 +16412,19500615_6768,23459,21,43000,RENT,5.0,EDUCATION,6500,12.73,0,2021-06-05 03:09:37.474000+00:00,2021-06-05 03:09:37.474000+00:00 +18692,19651012_8609,73502,23,65000,MORTGAGE,5.0,EDUCATION,4000,11.49,0,2021-05-07 01:42:46.985000+00:00,2021-05-07 01:42:46.985000+00:00 +32118,19790817_5102,48708,28,80000,MORTGAGE,8.0,EDUCATION,13000,13.47,0,2020-11-16 22:43:37.143000+00:00,2020-11-16 22:43:37.143000+00:00 +19662,19790905_9453,14805,23,55000,RENT,7.0,MEDICAL,4200,9.91,0,2021-04-24 16:59:31.294000+00:00,2021-04-24 16:59:31.294000+00:00 +28271,19740703_6655,98247,28,42000,RENT,4.0,MEDICAL,5000,5.42,0,2021-01-04 23:30:59.229000+00:00,2021-01-04 23:30:59.229000+00:00 +24345,19820504_8810,2135,22,38000,MORTGAGE,6.0,DEBTCONSOLIDATION,8500,5.42,0,2021-02-24 00:28:18.695000+00:00,2021-02-24 00:28:18.695000+00:00 +30899,19670830_6170,57106,29,77040,RENT,1.0,EDUCATION,9500,12.84,0,2020-12-02 11:36:59.769000+00:00,2020-12-02 11:36:59.769000+00:00 +10009,19780808_9123,71260,21,10000,OWN,6.0,VENTURE,1600,14.74,1,2021-08-25 
17:49:30.267000+00:00,2021-08-25 17:49:30.267000+00:00 +27472,19570725_4176,68841,28,69996,RENT,1.0,VENTURE,11000,13.79,0,2021-01-15 03:55:44.133000+00:00,2021-01-15 03:55:44.133000+00:00 +29246,20011202_2398,27314,29,74000,RENT,2.0,HOMEIMPROVEMENT,6000,5.42,0,2020-12-23 13:15:57.375000+00:00,2020-12-23 13:15:57.375000+00:00 +23779,19971008_4402,88013,25,150000,MORTGAGE,9.0,MEDICAL,12125,8.7,0,2021-03-03 05:36:36.387000+00:00,2021-03-03 05:36:36.387000+00:00 +17774,19680521_1163,72016,26,43000,RENT,1.0,MEDICAL,8000,12.87,0,2021-05-18 18:31:38.577000+00:00,2021-05-18 18:31:38.577000+00:00 +26065,19850929_2789,70339,28,58704,RENT,5.0,DEBTCONSOLIDATION,23500,14.72,1,2021-02-02 02:19:38.501000+00:00,2021-02-02 02:19:38.501000+00:00 +35124,19500503_4897,13039,27,80000,MORTGAGE,11.0,VENTURE,5000,8.0,0,2020-10-09 15:11:51.734000+00:00,2020-10-09 15:11:51.734000+00:00 +16810,19531112_6653,93654,24,37000,RENT,2.0,MEDICAL,7000,7.51,0,2021-05-31 01:24:46.871000+00:00,2021-05-31 01:24:46.871000+00:00 +37197,19730307_1277,46360,45,36000,RENT,1.0,VENTURE,4300,10.91,0,2020-09-13 05:04:16.407000+00:00,2020-09-13 05:04:16.407000+00:00 +24803,19990102_7440,24067,22,54000,MORTGAGE,5.0,MEDICAL,7000,10.65,0,2021-02-18 04:22:14.132000+00:00,2021-02-18 04:22:14.132000+00:00 +36314,19761209_5898,32462,41,121200,RENT,1.0,VENTURE,10000,6.03,0,2020-09-24 11:10:44.856000+00:00,2020-09-24 11:10:44.856000+00:00 +31986,19991109_2807,47404,27,38500,RENT,0.0,DEBTCONSOLIDATION,11500,12.42,0,2020-11-18 15:06:19.856000+00:00,2020-11-18 15:06:19.856000+00:00 +36830,19561212_2700,78557,38,64800,RENT,15.0,PERSONAL,7500,9.63,0,2020-09-17 21:20:08.798000+00:00,2020-09-17 21:20:08.798000+00:00 +14350,19520113_9018,62201,25,43000,MORTGAGE,5.0,DEBTCONSOLIDATION,10000,7.88,0,2021-07-01 09:55:19.242000+00:00,2021-07-01 09:55:19.242000+00:00 +36261,19920311_4228,29909,36,40000,OWN,2.0,PERSONAL,5000,7.88,0,2020-09-25 03:23:30.187000+00:00,2020-09-25 03:23:30.187000+00:00 +25873,19520329_4764,30031,35,192000,MORTGAGE,11.0,HOMEIMPROVEMENT,10000,11.99,0,2021-02-04 13:03:35.174000+00:00,2021-02-04 13:03:35.174000+00:00 +14128,19770819_6548,18224,21,30000,RENT,3.0,VENTURE,5000,13.92,0,2021-07-04 05:49:52.895000+00:00,2021-07-04 05:49:52.895000+00:00 +38552,19650412_2624,93160,55,34000,RENT,0.0,DEBTCONSOLIDATION,12000,6.92,1,2020-08-26 22:34:46.138000+00:00,2020-08-26 22:34:46.138000+00:00 +14867,19950326_6863,16748,25,76000,RENT,1.0,DEBTCONSOLIDATION,5000,7.9,0,2021-06-24 19:46:21.951000+00:00,2021-06-24 19:46:21.951000+00:00 +37447,19990913_6732,47449,44,80160,MORTGAGE,13.0,DEBTCONSOLIDATION,5500,13.92,0,2020-09-10 00:35:48.240000+00:00,2020-09-10 00:35:48.240000+00:00 +26458,19870230_8723,10023,35,44000,RENT,3.0,PERSONAL,16000,13.99,1,2021-01-28 02:06:34.061000+00:00,2021-01-28 02:06:34.061000+00:00 +17077,19521111_8053,61370,25,52000,RENT,3.0,EDUCATION,7200,12.69,0,2021-05-27 15:44:17.748000+00:00,2021-05-27 15:44:17.748000+00:00 +23828,19840927_3367,54889,25,144996,MORTGAGE,9.0,DEBTCONSOLIDATION,17600,14.84,1,2021-03-02 14:37:15.986000+00:00,2021-03-02 14:37:15.986000+00:00 +30197,19890607_9666,46311,31,37681,RENT,16.0,DEBTCONSOLIDATION,8000,10.99,0,2020-12-11 10:21:25.104000+00:00,2020-12-11 10:21:25.104000+00:00 +22543,19940220_4202,29129,24,70000,RENT,5.0,HOMEIMPROVEMENT,14400,12.23,0,2021-03-18 23:41:59.968000+00:00,2021-03-18 23:41:59.968000+00:00 +28798,19861023_4485,72089,31,30000,RENT,1.0,DEBTCONSOLIDATION,5600,7.74,0,2020-12-29 06:18:29.611000+00:00,2020-12-29 06:18:29.611000+00:00 
+17195,19710412_8079,19545,22,25000,RENT,0.0,PERSONAL,7500,7.51,0,2021-05-26 03:38:32.293000+00:00,2021-05-26 03:38:32.293000+00:00 +13615,19791213_6708,30341,25,40000,MORTGAGE,9.0,DEBTCONSOLIDATION,3500,13.49,0,2021-07-10 18:45:25.255000+00:00,2021-07-10 18:45:25.255000+00:00 +26278,19510824_6848,76107,29,14400,RENT,3.0,EDUCATION,1000,16.32,1,2021-01-30 09:10:15.942000+00:00,2021-01-30 09:10:15.942000+00:00 +20708,20010723_8521,53929,23,80000,MORTGAGE,6.0,EDUCATION,9000,7.49,0,2021-04-11 09:01:21.920000+00:00,2021-04-11 09:01:21.920000+00:00 +16799,19990822_3741,4761,24,54996,MORTGAGE,8.0,VENTURE,5500,5.99,0,2021-05-31 04:46:40.431000+00:00,2021-05-31 04:46:40.431000+00:00 +15596,19620322_7692,22315,26,49995,MORTGAGE,10.0,EDUCATION,10000,11.71,0,2021-06-15 12:46:23.334000+00:00,2021-06-15 12:46:23.334000+00:00 +31149,19891102_5641,83332,27,69830,OWN,3.0,EDUCATION,15000,16.45,0,2020-11-29 07:08:31.602000+00:00,2020-11-29 07:08:31.602000+00:00 +12385,20000924_1583,98445,24,53000,RENT,6.0,VENTURE,12000,11.11,0,2021-07-26 11:00:41.441000+00:00,2021-07-26 11:00:41.441000+00:00 +31249,19680402_2654,91320,33,43337,RENT,17.0,PERSONAL,10000,6.92,0,2020-11-28 00:33:08.335000+00:00,2020-11-28 00:33:08.335000+00:00 +20340,19590528_8241,32837,23,83000,RENT,3.0,MEDICAL,10000,6.54,0,2021-04-16 01:35:35.543000+00:00,2021-04-16 01:35:35.543000+00:00 +24679,19700814_6341,80034,24,30000,RENT,8.0,MEDICAL,8000,9.25,0,2021-02-19 18:18:06.983000+00:00,2021-02-19 18:18:06.983000+00:00 +22753,19820623_5294,36343,23,37000,RENT,2.0,PERSONAL,2000,11.14,0,2021-03-16 07:27:41.108000+00:00,2021-03-16 07:27:41.108000+00:00 +36934,19910319_8730,15131,40,60000,MORTGAGE,1.0,VENTURE,4800,14.59,0,2020-09-16 13:31:20.600000+00:00,2020-09-16 13:31:20.600000+00:00 +25389,19680426_8272,24077,25,70000,MORTGAGE,6.0,VENTURE,14000,5.42,0,2021-02-10 17:06:51.787000+00:00,2021-02-10 17:06:51.787000+00:00 +27590,19560401_5451,55943,27,70000,RENT,9.0,MEDICAL,3600,14.79,0,2021-01-13 15:49:58.678000+00:00,2021-01-13 15:49:58.678000+00:00 +27477,19830129_6638,96141,27,83604,RENT,2.0,MEDICAL,11000,9.76,0,2021-01-15 02:23:57.970000+00:00,2021-01-15 02:23:57.970000+00:00 +10826,19500916_9213,79529,25,46000,RENT,8.0,DEBTCONSOLIDATION,18250,14.26,1,2021-08-15 07:54:23.175000+00:00,2021-08-15 07:54:23.175000+00:00 +34675,19640826_2701,10010,29,99000,RENT,3.0,PERSONAL,8000,12.99,0,2020-10-15 08:32:45.204000+00:00,2020-10-15 08:32:45.204000+00:00 +31617,19950614_8318,28722,27,75000,MORTGAGE,6.0,PERSONAL,3500,6.03,0,2020-11-23 07:58:54.712000+00:00,2020-11-23 07:58:54.712000+00:00 +28620,19760924_6012,67561,31,39000,MORTGAGE,0.0,HOMEIMPROVEMENT,12000,13.3,1,2020-12-31 12:45:29.027000+00:00,2020-12-31 12:45:29.027000+00:00 +34631,19580401_1779,66724,28,64900,MORTGAGE,12.0,DEBTCONSOLIDATION,25000,15.37,1,2020-10-15 22:00:19.441000+00:00,2020-10-15 22:00:19.441000+00:00 +28909,19620801_1015,59487,31,24000,RENT,3.0,VENTURE,6000,8.49,0,2020-12-27 20:21:12.785000+00:00,2020-12-27 20:21:12.785000+00:00 +23168,19730420_2738,89049,26,50004,RENT,3.0,VENTURE,15000,13.72,0,2021-03-11 00:30:49.549000+00:00,2021-03-11 00:30:49.549000+00:00 +17383,19810519_4844,17403,25,74004,RENT,2.0,DEBTCONSOLIDATION,7500,12.61,1,2021-05-23 18:08:00.551000+00:00,2021-05-23 18:08:00.551000+00:00 +30021,19970825_2224,13338,32,155000,RENT,3.0,PERSONAL,7500,11.59,0,2020-12-13 16:11:42.055000+00:00,2020-12-13 16:11:42.055000+00:00 +37117,19850316_6074,60959,37,60000,RENT,0.0,VENTURE,9700,13.61,0,2020-09-14 05:32:35.021000+00:00,2020-09-14 05:32:35.021000+00:00 
+19770,19951029_6112,27563,24,72000,MORTGAGE,2.0,EDUCATION,6000,16.77,0,2021-04-23 07:57:18.165000+00:00,2021-04-23 07:57:18.165000+00:00 +24257,19511002_4158,29150,25,74000,OWN,1.0,MEDICAL,16000,8.49,0,2021-02-25 03:23:27.170000+00:00,2021-02-25 03:23:27.170000+00:00 +35711,19560406_9865,89053,37,175000,RENT,2.0,HOMEIMPROVEMENT,15000,9.76,0,2020-10-02 03:38:08.156000+00:00,2020-10-02 03:38:08.156000+00:00 +36382,19850307_1167,69156,47,43200,MORTGAGE,8.0,HOMEIMPROVEMENT,13750,14.84,0,2020-09-23 14:22:41.034000+00:00,2020-09-23 14:22:41.034000+00:00 +19011,19480901_2295,33677,23,66000,MORTGAGE,7.0,PERSONAL,10000,9.63,0,2021-05-03 00:07:53.762000+00:00,2021-05-03 00:07:53.762000+00:00 +28146,19561029_3031,7848,31,20800,RENT,3.0,MEDICAL,9750,17.19,1,2021-01-06 13:45:13.313000+00:00,2021-01-06 13:45:13.313000+00:00 +18191,19840122_6705,51054,26,65000,RENT,4.0,PERSONAL,8000,6.92,0,2021-05-13 10:58:04.553000+00:00,2021-05-13 10:58:04.553000+00:00 +34020,19851126_2947,51201,27,61584,RENT,3.0,VENTURE,15000,15.21,0,2020-10-23 16:54:32.603000+00:00,2020-10-23 16:54:32.603000+00:00 +32390,19680828_7648,93243,27,85000,MORTGAGE,11.0,PERSONAL,10000,7.49,0,2020-11-13 11:31:21.857000+00:00,2020-11-13 11:31:21.857000+00:00 +12464,19640720_7331,95367,24,57000,RENT,1.0,MEDICAL,3000,7.88,0,2021-07-25 10:50:44.060000+00:00,2021-07-25 10:50:44.060000+00:00 +24996,19800923_7230,75283,24,85000,MORTGAGE,2.0,MEDICAL,7000,12.69,0,2021-02-15 17:19:56.226000+00:00,2021-02-15 17:19:56.226000+00:00 +24185,19500730_9268,13406,23,30000,RENT,0.0,DEBTCONSOLIDATION,5000,11.86,0,2021-02-26 01:24:55.923000+00:00,2021-02-26 01:24:55.923000+00:00 +18171,19760127_6663,46142,26,55000,MORTGAGE,4.0,DEBTCONSOLIDATION,10000,7.51,1,2021-05-13 17:05:09.206000+00:00,2021-05-13 17:05:09.206000+00:00 +11160,19780524_1657,83847,23,26000,MORTGAGE,3.0,DEBTCONSOLIDATION,7800,10.59,0,2021-08-11 01:44:11.463000+00:00,2021-08-11 01:44:11.463000+00:00 +20892,19921003_2244,1451,22,80340,OWN,2.0,EDUCATION,7000,7.14,0,2021-04-09 00:44:15.108000+00:00,2021-04-09 00:44:15.108000+00:00 +18815,19720629_8145,8010,22,65000,MORTGAGE,2.0,PERSONAL,13000,14.35,0,2021-05-05 12:05:15.366000+00:00,2021-05-05 12:05:15.366000+00:00 +27338,19570408_3315,25512,27,22800,RENT,5.0,PERSONAL,3500,13.99,1,2021-01-16 20:55:09.311000+00:00,2021-01-16 20:55:09.311000+00:00 +21058,19681202_9762,12095,26,65450,MORTGAGE,3.0,MEDICAL,10800,12.21,1,2021-04-06 21:57:30.485000+00:00,2021-04-06 21:57:30.485000+00:00 +31687,19541126_4345,72701,27,70000,RENT,11.0,MEDICAL,10000,10.65,0,2020-11-22 10:34:08.425000+00:00,2020-11-22 10:34:08.425000+00:00 +36001,19580204_4513,16121,39,62692,RENT,9.0,VENTURE,13200,9.88,0,2020-09-28 10:55:30.682000+00:00,2020-09-28 10:55:30.682000+00:00 +10929,19771126_8304,96779,22,125000,RENT,3.0,MEDICAL,18000,14.79,0,2021-08-14 00:23:56.210000+00:00,2021-08-14 00:23:56.210000+00:00 +32036,19641126_8587,92832,27,80000,MORTGAGE,6.0,MEDICAL,23750,10.99,0,2020-11-17 23:48:38.222000+00:00,2020-11-17 23:48:38.222000+00:00 +25819,19781222_8337,10280,27,204000,MORTGAGE,3.0,HOMEIMPROVEMENT,25000,15.31,0,2021-02-05 05:34:41.738000+00:00,2021-02-05 05:34:41.738000+00:00 +35762,19961013_4086,27894,43,72000,RENT,11.0,EDUCATION,22000,10.62,1,2020-10-01 12:02:05.290000+00:00,2020-10-01 12:02:05.290000+00:00 +36975,19580926_5424,35186,45,61000,OWN,7.0,MEDICAL,20000,13.22,0,2020-09-16 00:58:50.060000+00:00,2020-09-16 00:58:50.060000+00:00 +19621,19990918_8053,56258,22,70000,MORTGAGE,3.0,EDUCATION,18000,14.42,0,2021-04-25 05:32:01.833000+00:00,2021-04-25 05:32:01.833000+00:00 
+34073,19980721_1321,91016,29,135000,MORTGAGE,14.0,MEDICAL,24000,5.99,0,2020-10-23 00:41:47.272000+00:00,2020-10-23 00:41:47.272000+00:00 +34677,19701025_7628,14513,35,85000,RENT,1.0,VENTURE,3600,7.51,0,2020-10-15 07:56:02.738000+00:00,2020-10-15 07:56:02.738000+00:00 +16697,19760414_2750,84056,24,54000,MORTGAGE,9.0,MEDICAL,15600,13.61,0,2021-06-01 11:58:46.163000+00:00,2021-06-01 11:58:46.163000+00:00 +31990,19900924_2879,90014,29,79894,MORTGAGE,7.0,VENTURE,20000,14.96,0,2020-11-18 13:52:54.925000+00:00,2020-11-18 13:52:54.925000+00:00 +21842,19580604_5865,29417,22,92000,OTHER,2.0,MEDICAL,6000,12.22,0,2021-03-27 22:08:04.071000+00:00,2021-03-27 22:08:04.071000+00:00 +10281,19641127_9743,18327,25,134300,RENT,3.0,MEDICAL,5000,16.49,1,2021-08-22 06:37:14.980000+00:00,2021-08-22 06:37:14.980000+00:00 +32754,19900704_3938,25652,32,67150,MORTGAGE,4.0,MEDICAL,13000,16.0,1,2020-11-08 20:10:33.165000+00:00,2020-11-08 20:10:33.165000+00:00 +13435,19690305_6002,23432,25,39600,MORTGAGE,5.0,HOMEIMPROVEMENT,4800,12.42,0,2021-07-13 01:49:07.136000+00:00,2021-07-13 01:49:07.136000+00:00 +12092,20001029_5071,95310,26,64000,RENT,3.0,MEDICAL,2500,10.99,0,2021-07-30 04:38:22.613000+00:00,2021-07-30 04:38:22.613000+00:00 +29332,19960918_6100,27906,33,45000,MORTGAGE,3.0,HOMEIMPROVEMENT,8000,16.49,1,2020-12-22 10:57:31.365000+00:00,2020-12-22 10:57:31.365000+00:00 +35584,19620909_9491,38943,27,140987,MORTGAGE,5.0,VENTURE,6450,5.99,0,2020-10-03 18:29:04.706000+00:00,2020-10-03 18:29:04.706000+00:00 +13314,19660520_4478,8608,24,38400,MORTGAGE,7.0,EDUCATION,10000,14.72,0,2021-07-14 14:49:56.289000+00:00,2021-07-14 14:49:56.289000+00:00 +32831,19930923_3827,74957,30,94000,MORTGAGE,13.0,EDUCATION,10000,14.27,0,2020-11-07 20:37:18.249000+00:00,2020-11-07 20:37:18.249000+00:00 +26075,19910121_9862,49861,29,46000,RENT,1.0,DEBTCONSOLIDATION,22750,14.46,1,2021-02-01 23:16:06.174000+00:00,2021-02-01 23:16:06.174000+00:00 +37894,19500625_4818,11731,38,130000,MORTGAGE,1.0,HOMEIMPROVEMENT,30000,9.91,0,2020-09-04 07:51:37.236000+00:00,2020-09-04 07:51:37.236000+00:00 +11818,19561030_3762,43006,25,22800,RENT,1.0,MEDICAL,2400,10.36,0,2021-08-02 16:27:20.365000+00:00,2021-08-02 16:27:20.365000+00:00 +13906,19450828_7909,24260,23,52000,RENT,3.0,EDUCATION,4800,9.88,0,2021-07-07 01:44:26.548000+00:00,2021-07-07 01:44:26.548000+00:00 +14336,19620824_6416,96733,22,43200,RENT,6.0,EDUCATION,5000,12.69,0,2021-07-01 14:12:16.500000+00:00,2021-07-01 14:12:16.500000+00:00 +34300,19690121_9375,73062,28,92000,RENT,4.0,DEBTCONSOLIDATION,15000,7.9,0,2020-10-20 03:15:27.455000+00:00,2020-10-20 03:15:27.455000+00:00 +24229,19460328_3544,16407,24,54996,RENT,0.0,DEBTCONSOLIDATION,12500,12.87,0,2021-02-25 11:57:21.685000+00:00,2021-02-25 11:57:21.685000+00:00 +17674,19800608_3306,7070,26,36903,RENT,3.0,DEBTCONSOLIDATION,8000,12.21,0,2021-05-20 01:07:01.844000+00:00,2021-05-20 01:07:01.844000+00:00 +21305,19501016_9440,39365,24,36708,RENT,2.0,HOMEIMPROVEMENT,3000,15.33,1,2021-04-03 18:24:06.015000+00:00,2021-04-03 18:24:06.015000+00:00 +17884,20010404_9879,64024,24,52000,RENT,1.0,EDUCATION,8000,11.78,0,2021-05-17 08:52:42.983000+00:00,2021-05-17 08:52:42.983000+00:00 +13945,19630320_8557,58622,24,103641,RENT,8.0,VENTURE,4800,10.74,0,2021-07-06 13:48:38.474000+00:00,2021-07-06 13:48:38.474000+00:00 +16694,19540401_1520,91748,23,54000,MORTGAGE,0.0,EDUCATION,10000,11.49,0,2021-06-01 12:53:49.861000+00:00,2021-06-01 12:53:49.861000+00:00 +37450,20001128_1271,31643,36,81000,MORTGAGE,9.0,HOMEIMPROVEMENT,6000,12.99,0,2020-09-09 
23:40:44.542000+00:00,2020-09-09 23:40:44.542000+00:00 +16586,19900729_5815,74728,25,29120,RENT,0.0,HOMEIMPROVEMENT,6400,16.29,1,2021-06-02 21:56:02.990000+00:00,2021-06-02 21:56:02.990000+00:00 +14980,19940130_5797,29572,23,23340,RENT,8.0,EDUCATION,5300,7.9,0,2021-06-23 09:12:22.659000+00:00,2021-06-23 09:12:22.659000+00:00 +38564,19460921_2514,45773,63,1782000,RENT,13.0,EDUCATION,12025,14.27,0,2020-08-26 18:54:31.346000+00:00,2020-08-26 18:54:31.346000+00:00 +18974,19680723_9170,70807,24,66000,MORTGAGE,8.0,PERSONAL,15000,18.25,0,2021-05-03 11:26:59.371000+00:00,2021-05-03 11:26:59.371000+00:00 +15354,20010711_5288,29730,24,24000,RENT,1.0,MEDICAL,6000,11.99,0,2021-06-18 14:48:01.640000+00:00,2021-06-18 14:48:01.640000+00:00 +16186,19820319_5187,64136,22,60000,RENT,5.0,MEDICAL,7000,9.99,0,2021-06-08 00:17:36.058000+00:00,2021-06-08 00:17:36.058000+00:00 +36937,19640424_1652,18254,38,60000,OWN,2.0,VENTURE,6000,14.96,0,2020-09-16 12:36:16.902000+00:00,2020-09-16 12:36:16.902000+00:00 +29577,19941130_4884,50845,34,54000,MORTGAGE,5.0,HOMEIMPROVEMENT,27000,7.9,0,2020-12-19 08:00:49.360000+00:00,2020-12-19 08:00:49.360000+00:00 +38091,19840917_3681,70606,40,38000,RENT,4.0,DEBTCONSOLIDATION,7000,15.99,1,2020-09-01 19:35:54.399000+00:00,2020-09-01 19:35:54.399000+00:00 +32227,19790328_6095,73506,30,60000,RENT,0.0,PERSONAL,12000,11.71,0,2020-11-15 13:23:02.782000+00:00,2020-11-15 13:23:02.782000+00:00 +18763,19460130_9199,72768,26,45000,RENT,2.0,DEBTCONSOLIDATION,9050,7.29,0,2021-05-06 03:59:39.465000+00:00,2021-05-06 03:59:39.465000+00:00 +34734,19680928_2397,98685,34,84000,RENT,3.0,EDUCATION,9000,5.79,0,2020-10-14 14:29:52.476000+00:00,2020-10-14 14:29:52.476000+00:00 +34647,19590306_7046,30276,32,122004,MORTGAGE,13.0,MEDICAL,10000,8.59,0,2020-10-15 17:06:39.718000+00:00,2020-10-15 17:06:39.718000+00:00 +35691,19980402_5345,77831,36,69000,RENT,0.0,HOMEIMPROVEMENT,25000,14.59,1,2020-10-02 09:45:12.810000+00:00,2020-10-02 09:45:12.810000+00:00 +30795,19791014_9418,32204,31,65000,MORTGAGE,6.0,VENTURE,12000,7.49,0,2020-12-03 19:25:47.967000+00:00,2020-12-03 19:25:47.967000+00:00 +37027,19730922_2834,31052,36,63599,MORTGAGE,10.0,HOMEIMPROVEMENT,5200,7.91,0,2020-09-15 09:04:25.961000+00:00,2020-09-15 09:04:25.961000+00:00 +28062,19730909_3354,53716,34,96900,RENT,1.0,DEBTCONSOLIDATION,4800,5.42,0,2021-01-07 15:26:56.857000+00:00,2021-01-07 15:26:56.857000+00:00 +22232,19640601_3524,32192,25,86000,MORTGAGE,9.0,HOMEIMPROVEMENT,5000,10.36,1,2021-03-22 22:50:03.329000+00:00,2021-03-22 22:50:03.329000+00:00 +34199,19490125_2907,40228,34,143150,MORTGAGE,0.0,VENTURE,12000,9.32,0,2020-10-21 10:09:11.955000+00:00,2020-10-21 10:09:11.955000+00:00 +11548,19551210_7764,34674,23,19200,RENT,2.0,EDUCATION,2000,10.59,0,2021-08-06 03:02:53.186000+00:00,2021-08-06 03:02:53.186000+00:00 +22706,19971219_3037,14504,25,107000,MORTGAGE,4.0,DEBTCONSOLIDATION,24000,5.99,0,2021-03-16 21:50:19.043000+00:00,2021-03-16 21:50:19.043000+00:00 +20153,19610902_5287,50854,24,67600,RENT,7.0,HOMEIMPROVEMENT,10000,16.02,0,2021-04-18 10:47:46.052000+00:00,2021-04-18 10:47:46.052000+00:00 +30856,20000221_8382,30274,27,65000,MORTGAGE,3.0,DEBTCONSOLIDATION,15000,10.75,0,2020-12-03 00:46:12.774000+00:00,2020-12-03 00:46:12.774000+00:00 +38054,19881228_5956,97339,45,54000,RENT,1.0,DEBTCONSOLIDATION,19425,11.99,1,2020-09-02 06:55:00.008000+00:00,2020-09-02 06:55:00.008000+00:00 +22588,19621020_7458,36702,26,130000,RENT,3.0,VENTURE,14400,14.46,0,2021-03-18 09:56:04.498000+00:00,2021-03-18 09:56:04.498000+00:00 
+33510,19970312_2783,16674,34,35000,RENT,2.0,PERSONAL,15000,10.99,1,2020-10-30 04:55:01.265000+00:00,2020-10-30 04:55:01.265000+00:00 +34970,19911022_6935,34609,28,68000,MORTGAGE,7.0,HOMEIMPROVEMENT,2500,11.71,0,2020-10-11 14:18:21.566000+00:00,2020-10-11 14:18:21.566000+00:00 +17411,19520303_2944,54968,25,58000,MORTGAGE,7.0,DEBTCONSOLIDATION,7200,11.71,0,2021-05-23 09:34:06.036000+00:00,2021-05-23 09:34:06.036000+00:00 +17958,19840127_8710,70127,22,60000,RENT,6.0,MEDICAL,8000,7.88,0,2021-05-16 10:14:31.765000+00:00,2021-05-16 10:14:31.765000+00:00 +36569,19510521_1606,89301,44,49510,MORTGAGE,5.0,HOMEIMPROVEMENT,4000,10.62,0,2020-09-21 05:10:30.525000+00:00,2020-09-21 05:10:30.525000+00:00 +20030,19541127_6618,30218,23,75000,MORTGAGE,7.0,VENTURE,5400,12.69,0,2021-04-20 00:25:17.671000+00:00,2021-04-20 00:25:17.671000+00:00 +14113,19990513_2356,48174,23,29730,RENT,2.0,DEBTCONSOLIDATION,5000,7.66,0,2021-07-04 10:25:11.385000+00:00,2021-07-04 10:25:11.385000+00:00 +12175,19710310_3389,68757,24,18000,RENT,5.0,PERSONAL,3000,15.27,1,2021-07-29 03:15:00.302000+00:00,2021-07-29 03:15:00.302000+00:00 +12515,19970330_2150,5733,24,35000,MORTGAGE,7.0,EDUCATION,15000,7.49,0,2021-07-24 19:14:41.193000+00:00,2021-07-24 19:14:41.193000+00:00 +25074,19501111_8936,42202,21,37000,OWN,3.0,MEDICAL,7200,13.61,0,2021-02-14 17:28:20.078000+00:00,2021-02-14 17:28:20.078000+00:00 +30844,19970519_8461,14819,28,62000,RENT,3.0,MEDICAL,9200,10.0,0,2020-12-03 04:26:27.566000+00:00,2020-12-03 04:26:27.566000+00:00 +15071,19690708_2092,18347,26,47000,MORTGAGE,5.0,MEDICAL,6600,7.9,0,2021-06-22 05:22:10.486000+00:00,2021-06-22 05:22:10.486000+00:00 +23509,19690211_8042,12580,26,22440,OWN,10.0,VENTURE,4750,11.86,0,2021-03-06 16:12:09.208000+00:00,2021-03-06 16:12:09.208000+00:00 +30221,19891006_8318,54937,32,40000,RENT,3.0,DEBTCONSOLIDATION,8000,9.62,0,2020-12-11 03:00:55.520000+00:00,2020-12-11 03:00:55.520000+00:00 +31898,19830519_7021,94531,29,34900,RENT,2.0,MEDICAL,11200,9.99,1,2020-11-19 18:01:28.331000+00:00,2020-11-19 18:01:28.331000+00:00 +36886,19810426_2329,27405,42,60000,MORTGAGE,1.0,MEDICAL,8000,5.42,0,2020-09-17 04:12:19.768000+00:00,2020-09-17 04:12:19.768000+00:00 +19948,19700530_6338,7078,23,73500,MORTGAGE,7.0,VENTURE,10000,6.92,0,2021-04-21 01:30:18.750000+00:00,2021-04-21 01:30:18.750000+00:00 +22172,19491204_9518,17748,26,96300,MORTGAGE,10.0,PERSONAL,15000,6.17,0,2021-03-23 17:11:17.289000+00:00,2021-03-23 17:11:17.289000+00:00 +12299,19471214_6602,25245,25,45000,RENT,4.0,MEDICAL,3000,7.9,0,2021-07-27 13:19:07.450000+00:00,2021-07-27 13:19:07.450000+00:00 +16569,19791009_5686,63944,24,53184,MORTGAGE,8.0,PERSONAL,13500,10.99,0,2021-06-03 03:08:03.945000+00:00,2021-06-03 03:08:03.945000+00:00 +32960,19580205_5390,60622,31,96000,OWN,3.0,PERSONAL,10000,9.99,0,2020-11-06 05:09:39.234000+00:00,2020-11-06 05:09:39.234000+00:00 +17130,19861204_8812,70068,23,55812,MORTGAGE,4.0,MEDICAL,21825,11.36,0,2021-05-26 23:31:32.417000+00:00,2021-05-26 23:31:32.417000+00:00 +23012,19931106_1555,29831,25,115000,MORTGAGE,1.0,EDUCATION,20000,15.65,0,2021-03-13 00:14:01.846000+00:00,2021-03-13 00:14:01.846000+00:00 +36979,19970524_1853,24245,38,61450,OWN,6.0,VENTURE,12500,7.49,0,2020-09-15 23:45:25.130000+00:00,2020-09-15 23:45:25.130000+00:00 +10829,19600930_7223,70083,23,50000,RENT,4.0,VENTURE,18225,10.74,1,2021-08-15 06:59:19.477000+00:00,2021-08-15 06:59:19.477000+00:00 +18660,19630414_5802,46982,21,65000,MORTGAGE,5.0,DEBTCONSOLIDATION,2200,12.42,0,2021-05-07 11:30:06.430000+00:00,2021-05-07 11:30:06.430000+00:00 
+11012,19620615_1391,70359,25,114000,RENT,3.0,EDUCATION,17000,10.37,0,2021-08-12 23:00:33.898000+00:00,2021-08-12 23:00:33.898000+00:00 +19777,20000201_4113,39342,22,72000,OWN,6.0,VENTURE,16000,10.37,0,2021-04-23 05:48:49.536000+00:00,2021-04-23 05:48:49.536000+00:00 +21903,19641112_9044,61241,24,93000,MORTGAGE,3.0,VENTURE,18000,11.78,0,2021-03-27 03:28:28.878000+00:00,2021-03-27 03:28:28.878000+00:00 +25469,19590417_6870,23112,22,50000,MORTGAGE,6.0,VENTURE,10000,9.88,0,2021-02-09 16:38:33.173000+00:00,2021-02-09 16:38:33.173000+00:00 +28331,19940903_5969,60617,32,48000,RENT,8.0,DEBTCONSOLIDATION,5000,11.97,0,2021-01-04 05:09:45.269000+00:00,2021-01-04 05:09:45.269000+00:00 +22520,19961221_8878,49306,26,102000,MORTGAGE,5.0,PERSONAL,13000,5.42,0,2021-03-19 06:44:08.320000+00:00,2021-03-19 06:44:08.320000+00:00 +19265,19550315_6667,53048,23,33000,RENT,4.0,EDUCATION,10000,11.99,0,2021-04-29 18:26:00.664000+00:00,2021-04-29 18:26:00.664000+00:00 +16846,19940916_8629,21226,24,55000,MORTGAGE,8.0,MEDICAL,15000,9.91,0,2021-05-30 14:24:02.495000+00:00,2021-05-30 14:24:02.495000+00:00 +14911,19621018_6161,14428,22,90000,RENT,4.0,EDUCATION,5000,14.96,0,2021-06-24 06:18:47.714000+00:00,2021-06-24 06:18:47.714000+00:00 +14954,19461205_4101,71016,23,150000,RENT,3.0,VENTURE,5000,10.65,0,2021-06-23 17:09:34.709000+00:00,2021-06-23 17:09:34.709000+00:00 +19014,19560428_7329,92312,24,65450,RENT,7.0,DEBTCONSOLIDATION,9600,8.49,0,2021-05-02 23:12:50.064000+00:00,2021-05-02 23:12:50.064000+00:00 +11896,19600821_6571,44129,22,43000,RENT,6.0,PERSONAL,13500,10.95,1,2021-08-01 16:35:44.217000+00:00,2021-08-01 16:35:44.217000+00:00 +24340,19600119_4523,71740,26,165000,MORTGAGE,10.0,EDUCATION,20000,13.22,0,2021-02-24 02:00:04.859000+00:00,2021-02-24 02:00:04.859000+00:00 +20458,19760306_8686,18053,22,90000,RENT,6.0,MEDICAL,10200,11.71,0,2021-04-14 13:29:50.088000+00:00,2021-04-14 13:29:50.088000+00:00 +37561,20000210_6704,10543,41,90000,MORTGAGE,23.0,VENTURE,13200,6.62,0,2020-09-08 13:43:27.715000+00:00,2020-09-08 13:43:27.715000+00:00 +32150,19930916_9118,10579,28,80500,MORTGAGE,4.0,DEBTCONSOLIDATION,6000,7.74,0,2020-11-16 12:56:17.698000+00:00,2020-11-16 12:56:17.698000+00:00 +18313,20000205_6515,85622,24,62000,MORTGAGE,8.0,VENTURE,20000,16.7,0,2021-05-11 21:38:54.167000+00:00,2021-05-11 21:38:54.167000+00:00 +10474,19850915_7842,61555,25,47052,RENT,5.0,VENTURE,21250,10.99,1,2021-08-19 19:34:57.075000+00:00,2021-08-19 19:34:57.075000+00:00 +25101,19510123_1444,3821,25,75000,MORTGAGE,7.0,PERSONAL,15000,9.99,0,2021-02-14 09:12:46.796000+00:00,2021-02-14 09:12:46.796000+00:00 +25383,19870911_8554,23187,23,92004,MORTGAGE,7.0,EDUCATION,24250,11.12,0,2021-02-10 18:56:59.183000+00:00,2021-02-10 18:56:59.183000+00:00 +22595,19910502_4545,80470,25,92085,MORTGAGE,4.0,HOMEIMPROVEMENT,2500,15.27,1,2021-03-18 07:47:35.870000+00:00,2021-03-18 07:47:35.870000+00:00 +16364,19871213_7309,32254,24,24000,RENT,0.0,EDUCATION,6500,11.36,0,2021-06-05 17:50:36.643000+00:00,2021-06-05 17:50:36.643000+00:00 +13914,19640724_8897,98229,21,41000,MORTGAGE,4.0,DEBTCONSOLIDATION,7000,6.03,0,2021-07-06 23:17:36.687000+00:00,2021-07-06 23:17:36.687000+00:00 +34989,19630927_9393,98053,28,33600,MORTGAGE,12.0,MEDICAL,3000,5.42,0,2020-10-11 08:29:38.145000+00:00,2020-10-11 08:29:38.145000+00:00 +31359,19520603_8534,65014,27,53550,RENT,3.0,MEDICAL,4200,11.99,1,2020-11-26 14:54:12.741000+00:00,2020-11-26 14:54:12.741000+00:00 +38289,19520827_2880,34982,36,98240,MORTGAGE,4.0,VENTURE,20000,16.35,0,2020-08-30 07:01:50.331000+00:00,2020-08-30 
07:01:50.331000+00:00 +31365,19740420_1544,24440,28,71007,MORTGAGE,4.0,EDUCATION,25000,18.62,0,2020-11-26 13:04:05.345000+00:00,2020-11-26 13:04:05.345000+00:00 +28349,19800518_5656,77354,29,59520,RENT,13.0,HOMEIMPROVEMENT,9000,5.42,0,2021-01-03 23:39:23.081000+00:00,2021-01-03 23:39:23.081000+00:00 +15891,19540924_4310,25839,25,50000,MORTGAGE,9.0,DEBTCONSOLIDATION,18000,7.88,0,2021-06-11 18:31:59.696000+00:00,2021-06-11 18:31:59.696000+00:00 +17069,20010130_4641,43907,24,50000,RENT,0.0,VENTURE,7200,11.71,0,2021-05-27 18:11:07.610000+00:00,2021-05-27 18:11:07.610000+00:00 +36103,20011007_8293,27712,40,35820,RENT,0.0,MEDICAL,3275,8.88,0,2020-09-27 03:43:24.949000+00:00,2020-09-27 03:43:24.949000+00:00 +15534,19571023_2396,41049,24,49000,MORTGAGE,2.0,VENTURE,20000,15.57,0,2021-06-16 07:44:19.760000+00:00,2021-06-16 07:44:19.760000+00:00 +27256,19971016_3322,78250,31,30892,RENT,3.0,MEDICAL,3100,10.59,1,2021-01-17 22:00:10.390000+00:00,2021-01-17 22:00:10.390000+00:00 +26362,19671107_2852,30002,27,132000,RENT,4.0,DEBTCONSOLIDATION,18000,8.49,0,2021-01-29 07:28:32.398000+00:00,2021-01-29 07:28:32.398000+00:00 +25530,19480802_9190,5872,24,70000,MORTGAGE,8.0,DEBTCONSOLIDATION,25000,9.62,0,2021-02-08 21:58:57.980000+00:00,2021-02-08 21:58:57.980000+00:00 +32122,19840324_9938,89049,28,80000,OWN,2.0,HOMEIMPROVEMENT,16000,11.66,0,2020-11-16 21:30:12.213000+00:00,2020-11-16 21:30:12.213000+00:00 +13278,19510702_6043,8641,23,28000,RENT,0.0,EDUCATION,4000,8.9,0,2021-07-15 01:50:40.665000+00:00,2021-07-15 01:50:40.665000+00:00 +26690,19701118_2971,76706,27,122000,RENT,3.0,PERSONAL,15000,12.73,0,2021-01-25 03:08:28.082000+00:00,2021-01-25 03:08:28.082000+00:00 +22411,19540321_1149,94141,25,100000,MORTGAGE,3.0,PERSONAL,13750,15.95,0,2021-03-20 16:04:42.681000+00:00,2021-03-20 16:04:42.681000+00:00 +19563,19810210_2392,92651,22,70000,MORTGAGE,6.0,EDUCATION,6000,7.14,0,2021-04-25 23:16:33.328000+00:00,2021-04-25 23:16:33.328000+00:00 +15385,19560627_2936,57103,25,48000,OWN,0.0,VENTURE,7500,7.05,0,2021-06-18 05:19:03.428000+00:00,2021-06-18 05:19:03.428000+00:00 +33540,19470923_2558,16827,33,38016,RENT,10.0,EDUCATION,15000,6.17,1,2020-10-29 19:44:24.285000+00:00,2020-10-29 19:44:24.285000+00:00 +34957,19670622_4084,20166,30,105000,OWN,14.0,HOMEIMPROVEMENT,15400,11.36,0,2020-10-11 18:16:57.590000+00:00,2020-10-11 18:16:57.590000+00:00 +17927,19890216_4733,1032,22,60000,MORTGAGE,7.0,VENTURE,6000,10.38,0,2021-05-16 19:43:29.978000+00:00,2021-05-16 19:43:29.978000+00:00 +17593,19631223_7957,97843,23,60000,MORTGAGE,5.0,DEBTCONSOLIDATION,10000,11.71,0,2021-05-21 01:53:41.690000+00:00,2021-05-21 01:53:41.690000+00:00 +19450,19540609_5340,40176,23,34000,RENT,7.0,EDUCATION,10000,10.36,1,2021-04-27 09:50:32.620000+00:00,2021-04-27 09:50:32.620000+00:00 +26661,19570511_6902,90036,30,28800,RENT,9.0,MEDICAL,2000,11.48,0,2021-01-25 12:00:43.829000+00:00,2021-01-25 12:00:43.829000+00:00 +25143,19880522_3824,14464,25,59004,RENT,1.0,HOMEIMPROVEMENT,20000,14.84,1,2021-02-13 20:21:55.024000+00:00,2021-02-13 20:21:55.024000+00:00 +10264,19530118_8263,65274,25,174708,RENT,9.0,VENTURE,9000,10.59,0,2021-08-22 11:49:15.936000+00:00,2021-08-22 11:49:15.936000+00:00 +18416,19920607_2455,44053,21,50000,RENT,5.0,DEBTCONSOLIDATION,8300,8.9,0,2021-05-10 14:08:27.202000+00:00,2021-05-10 14:08:27.202000+00:00 +29421,19810912_2316,68418,28,112200,RENT,11.0,MEDICAL,7000,11.66,0,2020-12-21 07:44:01.657000+00:00,2020-12-21 07:44:01.657000+00:00 +30504,19881006_7148,70665,31,65000,RENT,2.0,MEDICAL,8000,6.99,0,2020-12-07 
12:26:46.674000+00:00,2020-12-07 12:26:46.674000+00:00 +35206,19810303_4939,98346,33,612000,RENT,3.0,MEDICAL,5950,12.73,0,2020-10-08 14:06:50.655000+00:00,2020-10-08 14:06:50.655000+00:00 +36827,19991105_1560,3431,41,58000,RENT,16.0,PERSONAL,7500,11.03,0,2020-09-17 22:15:12.496000+00:00,2020-09-17 22:15:12.496000+00:00 +32065,19470628_6149,48021,33,80000,MORTGAGE,9.0,VENTURE,6000,11.11,0,2020-11-17 14:56:22.475000+00:00,2020-11-17 14:56:22.475000+00:00 +37454,19610704_2084,92075,40,81120,MORTGAGE,1.0,EDUCATION,24000,9.25,0,2020-09-09 22:27:19.611000+00:00,2020-09-09 22:27:19.611000+00:00 +33790,19520918_7032,50323,29,120000,MORTGAGE,10.0,PERSONAL,4700,7.14,0,2020-10-26 15:15:56.117000+00:00,2020-10-26 15:15:56.117000+00:00 +35227,19970426_2963,75150,27,48000,RENT,0.0,DEBTCONSOLIDATION,10000,10.62,0,2020-10-08 07:41:24.769000+00:00,2020-10-08 07:41:24.769000+00:00 +21869,19711122_4217,47968,22,34800,OWN,4.0,MEDICAL,7350,14.22,0,2021-03-27 13:52:30.789000+00:00,2021-03-27 13:52:30.789000+00:00 +14492,20010903_2877,20707,25,72500,RENT,2.0,HOMEIMPROVEMENT,9000,10.59,0,2021-06-29 14:29:04.203000+00:00,2021-06-29 14:29:04.203000+00:00 +13001,19780720_4827,26374,26,42000,RENT,2.0,PERSONAL,10800,13.99,1,2021-07-18 14:34:42.115000+00:00,2021-07-18 14:34:42.115000+00:00 +35508,19510724_2105,93024,33,75000,MORTGAGE,6.0,VENTURE,6000,7.51,0,2020-10-04 17:43:58.389000+00:00,2020-10-04 17:43:58.389000+00:00 +15873,19530912_2142,55446,21,62000,RENT,2.0,EDUCATION,6000,10.25,0,2021-06-12 00:02:21.884000+00:00,2021-06-12 00:02:21.884000+00:00 +23320,19890608_1101,62037,23,24000,OWN,2.0,VENTURE,6000,15.23,0,2021-03-09 02:01:02.183000+00:00,2021-03-09 02:01:02.183000+00:00 +31614,19930124_4664,84660,29,75000,OWN,5.0,VENTURE,6000,12.69,0,2020-11-23 08:53:58.410000+00:00,2020-11-23 08:53:58.410000+00:00 +33814,19620907_5395,64644,28,91800,MORTGAGE,1.0,MEDICAL,15000,15.37,1,2020-10-26 07:55:26.533000+00:00,2020-10-26 07:55:26.533000+00:00 +32421,19961218_1873,24589,28,42996,RENT,4.0,EDUCATION,3000,7.68,0,2020-11-13 02:02:23.644000+00:00,2020-11-13 02:02:23.644000+00:00 +15675,19510112_4032,6776,23,45000,RENT,7.0,VENTURE,6000,11.49,0,2021-06-14 12:36:25.953000+00:00,2021-06-14 12:36:25.953000+00:00 +19966,19730912_9973,79081,21,74000,MORTGAGE,5.0,VENTURE,25000,12.69,0,2021-04-20 19:59:56.562000+00:00,2021-04-20 19:59:56.562000+00:00 +27749,19910605_2864,90210,29,33996,RENT,11.0,EDUCATION,4000,13.47,1,2021-01-11 15:11:42.683000+00:00,2021-01-11 15:11:42.683000+00:00 +33593,19930430_3770,60042,33,115000,MORTGAGE,3.0,PERSONAL,14000,6.03,0,2020-10-29 03:31:38.954000+00:00,2020-10-29 03:31:38.954000+00:00 +19437,19620203_9560,27331,26,40000,RENT,10.0,VENTURE,10000,16.89,0,2021-04-27 13:49:08.645000+00:00,2021-04-27 13:49:08.645000+00:00 +28307,19661124_5462,62410,33,45360,RENT,3.0,PERSONAL,5000,15.99,0,2021-01-04 12:30:14.853000+00:00,2021-01-04 12:30:14.853000+00:00 +11689,19660219_4269,19943,26,65000,RENT,3.0,MEDICAL,2000,7.51,0,2021-08-04 07:54:59.380000+00:00,2021-08-04 07:54:59.380000+00:00 +36040,19870230_1132,85063,37,40000,RENT,1.0,VENTURE,3000,11.71,0,2020-09-27 22:59:42.608000+00:00,2020-09-27 22:59:42.608000+00:00 +32299,20001012_3230,77561,29,84000,MORTGAGE,9.0,HOMEIMPROVEMENT,5900,7.66,0,2020-11-14 15:21:34.030000+00:00,2020-11-14 15:21:34.030000+00:00 +28398,19950526_7823,5850,27,73004,RENT,5.0,EDUCATION,9000,9.63,1,2021-01-03 08:40:02.680000+00:00,2021-01-03 08:40:02.680000+00:00 +25591,19920127_9712,62355,22,26400,RENT,1.0,PERSONAL,3500,15.27,0,2021-02-08 03:19:22.787000+00:00,2021-02-08 
03:19:22.787000+00:00 +11647,19900712_8240,31808,22,33996,RENT,1.0,EDUCATION,14900,15.37,1,2021-08-04 20:45:51.152000+00:00,2021-08-04 20:45:51.152000+00:00 +26358,19740823_1239,19154,33,103000,RENT,17.0,PERSONAL,18000,16.82,0,2021-01-29 08:41:57.329000+00:00,2021-01-29 08:41:57.329000+00:00 +15364,19670430_8377,30439,23,25000,RENT,7.0,VENTURE,6000,7.51,0,2021-06-18 11:44:29.314000+00:00,2021-06-18 11:44:29.314000+00:00 +23675,19950102_1276,34208,23,73000,RENT,7.0,EDUCATION,15000,7.51,1,2021-03-04 13:25:24.585000+00:00,2021-03-04 13:25:24.585000+00:00 +28617,19670601_7686,78004,32,75000,RENT,4.0,MEDICAL,5000,9.99,0,2020-12-31 13:40:32.725000+00:00,2020-12-31 13:40:32.725000+00:00 +31585,19690530_8292,76621,32,74346,MORTGAGE,16.0,MEDICAL,7000,7.9,0,2020-11-23 17:46:14.157000+00:00,2020-11-23 17:46:14.157000+00:00 +15679,19950805_9294,77205,22,39000,RENT,6.0,HOMEIMPROVEMENT,6000,13.22,1,2021-06-14 11:23:01.022000+00:00,2021-06-14 11:23:01.022000+00:00 +23098,19611207_1373,7311,26,119000,MORTGAGE,6.0,VENTURE,15000,14.42,0,2021-03-11 21:55:35.836000+00:00,2021-03-11 21:55:35.836000+00:00 +25845,19831123_9073,99119,27,200000,MORTGAGE,7.0,PERSONAL,15000,9.88,0,2021-02-04 21:37:29.689000+00:00,2021-02-04 21:37:29.689000+00:00 +19378,19770808_9735,95690,22,69996,OWN,2.0,MEDICAL,8000,12.84,0,2021-04-28 07:52:01.372000+00:00,2021-04-28 07:52:01.372000+00:00 +28408,19910630_9243,11516,32,44400,MORTGAGE,5.0,HOMEIMPROVEMENT,6500,14.84,0,2021-01-03 05:36:30.353000+00:00,2021-01-03 05:36:30.353000+00:00 +18997,19870308_7912,49331,26,66000,MORTGAGE,7.0,PERSONAL,4000,11.99,0,2021-05-03 04:24:51.020000+00:00,2021-05-03 04:24:51.020000+00:00 +36539,19571122_6912,44632,39,20400,RENT,3.0,MEDICAL,6000,15.27,0,2020-09-21 14:21:07.505000+00:00,2020-09-21 14:21:07.505000+00:00 +19326,19920109_3054,76311,23,36000,RENT,5.0,MEDICAL,10000,9.99,0,2021-04-28 23:46:25.471000+00:00,2021-04-28 23:46:25.471000+00:00 +18114,19510725_6343,96104,23,24000,RENT,4.0,DEBTCONSOLIDATION,5000,10.62,0,2021-05-14 10:31:19.468000+00:00,2021-05-14 10:31:19.468000+00:00 +27879,19550715_9996,55342,28,40000,MORTGAGE,12.0,HOMEIMPROVEMENT,15000,5.99,0,2021-01-09 23:25:42.436000+00:00,2021-01-09 23:25:42.436000+00:00 +38227,19600710_1107,24134,42,30000,RENT,5.0,VENTURE,8000,14.96,0,2020-08-31 01:59:46.756000+00:00,2020-08-31 01:59:46.756000+00:00 +29627,19920415_9040,94111,32,43000,RENT,2.0,MEDICAL,6950,9.91,0,2020-12-18 16:43:07.727000+00:00,2020-12-18 16:43:07.727000+00:00 +20649,19681216_5909,88044,24,80000,MORTGAGE,8.0,EDUCATION,10000,8.9,0,2021-04-12 03:04:14.647000+00:00,2021-04-12 03:04:14.647000+00:00 +29235,19490301_7917,79608,33,50000,OWN,1.0,VENTURE,12000,13.99,0,2020-12-23 16:37:50.934000+00:00,2020-12-23 16:37:50.934000+00:00 +20281,19520125_8596,42204,23,75000,RENT,0.0,PERSONAL,10000,13.47,0,2021-04-16 19:38:28.270000+00:00,2021-04-16 19:38:28.270000+00:00 +14229,19690925_6673,96820,22,36996,RENT,0.0,MEDICAL,5000,5.42,0,2021-07-02 22:56:08.395000+00:00,2021-07-02 22:56:08.395000+00:00 diff --git a/ui/feature_repo/data/training_dataset_sample.parquet b/ui/feature_repo/data/training_dataset_sample.parquet new file mode 100644 index 0000000000..e9622aaf23 Binary files /dev/null and b/ui/feature_repo/data/training_dataset_sample.parquet differ diff --git a/ui/feature_repo/data/zipcode_table.parquet b/ui/feature_repo/data/zipcode_table.parquet new file mode 100644 index 0000000000..87b67e13e1 Binary files /dev/null and b/ui/feature_repo/data/zipcode_table.parquet differ diff --git a/ui/feature_repo/data/zipcode_table_sample.csv 
b/ui/feature_repo/data/zipcode_table_sample.csv new file mode 100644 index 0000000000..5690d7ffd3 --- /dev/null +++ b/ui/feature_repo/data/zipcode_table_sample.csv @@ -0,0 +1,2885 @@ +zipcode,city,state,location_type,tax_returns_filed,population,total_wages,event_timestamp,created_timestamp +1463,PEPPERELL,MA,PRIMARY,5549,10100,310246738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80477,STEAMBOAT SPRINGS,CO,PRIMARY,4376,6952,196199680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19401,NORRISTOWN,PA,PRIMARY,17975,29398,593090218,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31028,CENTERVILLE,GA,PRIMARY,2791,5013,103348211,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65802,SPRINGFIELD,MO,PRIMARY,18096,30388,484432932,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45101,ABERDEEN,OH,PRIMARY,1001,1826,29504355,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39356,ROSE HILL,MS,PRIMARY,319,583,9854530,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79019,CLAUDE,TX,PRIMARY,790,1486,25573842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2322,AVON,MA,PRIMARY,2294,3831,98754584,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74355,MIAMI,OK,PRIMARY,352,618,11028437,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95658,NEWCASTLE,CA,PRIMARY,3092,5562,136983650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47036,OLDENBURG,IN,PRIMARY,558,984,21447674,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65580,VICHY,MO,PRIMARY,365,665,9315944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8234,EGG HARBOR TOWNSHIP,NJ,PRIMARY,21001,37623,919724508,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62671,NEW HOLLAND,IL,PRIMARY,280,498,8845843,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77831,BEDIAS,TX,PRIMARY,1178,2191,34547649,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77518,BACLIFF,TX,PRIMARY,3754,6825,123827585,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84783,DAMMERON VALLEY,UT,PRIMARY,330,644,10937901,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13090,LIVERPOOL,NY,PRIMARY,15076,26337,669746981,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35674,TUSCUMBIA,AL,PRIMARY,7779,14316,238664103,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51012,CHEROKEE,IA,PRIMARY,3217,5529,90168643,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71935,CADDO GAP,AR,PRIMARY,271,527,5072844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18829,LE RAYSVILLE,PA,PRIMARY,370,673,10542911,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54512,BOULDER JUNCTION,WI,PRIMARY,641,1080,12456566,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46037,FISHERS,IN,PRIMARY,14420,29121,1109574040,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31301,ALLENHURST,GA,PRIMARY,1617,3025,43253409,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23441,TASLEY,VA,PRIMARY,282,484,5249223,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53532,DE FOREST,WI,PRIMARY,6637,12095,308509381,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71007,BETHANY,LA,PRIMARY,521,909,16527337,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31216,MACON,GA,PRIMARY,6918,12714,251849778,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65264,MARTINSBURG,MO,PRIMARY,404,710,10992871,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68781,TILDEN,NE,PRIMARY,678,1239,17490571,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32780,TITUSVILLE,FL,PRIMARY,16101,27299,530363329,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +5905,GUILDHALL,VT,PRIMARY,325,584,9251932,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38348,LAVINIA,TN,PRIMARY,410,757,11777928,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78505,MCALLEN,TX,PRIMARY,902,1682,22515793,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3894,WOLFEBORO,NH,PRIMARY,3113,5312,114608359,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15363,STRABANE,PA,PRIMARY,433,709,10185876,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39474,PRENTISS,MS,PRIMARY,2695,4780,63767399,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14220,BUFFALO,NY,PRIMARY,11783,19047,369097924,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56678,SOLWAY,MN,PRIMARY,541,1010,17871305,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11726,COPIAGUE,NY,PRIMARY,10282,17101,360442098,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60472,ROBBINS,IL,PRIMARY,2193,3628,42695628,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40111,CLOVERPORT,KY,PRIMARY,588,1104,15394648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1801,WOBURN,MA,PRIMARY,20667,33608,978759571,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15332,FINLEYVILLE,PA,PRIMARY,4295,7209,144526193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91309,CANOGA PARK,CA,PRIMARY,379,590,12803555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32362,WOODVILLE,FL,PRIMARY,563,951,15487000,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55763,MAKINEN,MN,PRIMARY,272,469,8786552,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97438,FALL CREEK,OR,PRIMARY,584,1032,20705230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6786,TERRYVILLE,CT,PRIMARY,4856,8443,210651428,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95333,LE GRAND,CA,PRIMARY,1118,2167,29961304,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29525,CLIO,SC,PRIMARY,966,1747,20933523,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6105,HARTFORD,CT,PRIMARY,8233,12164,294308268,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85658,MARANA,AZ,PRIMARY,2913,5442,143076605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48505,FLINT,MI,PRIMARY,9440,15571,121379263,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51638,ESSEX,IA,PRIMARY,607,1118,18363054,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51243,LITTLE ROCK,IA,PRIMARY,332,622,8558473,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1061,NORTHAMPTON,MA,PRIMARY,331,423,10230348,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70434,COVINGTON,LA,PRIMARY,1197,2050,35322862,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7836,FLANDERS,NJ,PRIMARY,5629,10541,436195117,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56132,GARVIN,MN,PRIMARY,259,477,7914548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11972,SPEONK,NY,PRIMARY,486,834,19114157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28480,WRIGHTSVILLE BEACH,NC,PRIMARY,1502,2209,67485629,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10459,BRONX,NY,PRIMARY,17977,30434,414021653,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27055,YADKINVILLE,NC,PRIMARY,6120,11454,195133931,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35802,HUNTSVILLE,AL,PRIMARY,10852,18463,521820459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83217,BANCROFT,ID,PRIMARY,327,641,9211460,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68649,NORTH BEND,NE,PRIMARY,830,1508,24530635,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+60004,ARLINGTON HEIGHTS,IL,PRIMARY,26586,46798,1560430403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97233,PORTLAND,OR,PRIMARY,14465,26032,388691435,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58254,MCVILLE,ND,PRIMARY,271,447,5976092,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6242,EASTFORD,CT,PRIMARY,773,1360,34542738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56576,RICHVILLE,MN,PRIMARY,428,764,9247938,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5340,BONDVILLE,VT,PRIMARY,436,720,14859739,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98241,DARRINGTON,WA,PRIMARY,1041,1835,33476575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95164,SAN JOSE,CA,PRIMARY,577,819,29710183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71222,BERNICE,LA,PRIMARY,1298,2432,35843765,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33126,MIAMI,FL,PRIMARY,22630,36254,540253154,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55118,SAINT PAUL,MN,PRIMARY,14389,23819,672233974,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28376,RAEFORD,NC,PRIMARY,14162,27467,456241205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7751,MORGANVILLE,NJ,PRIMARY,9056,18058,854320255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96821,HONOLULU,HI,PRIMARY,9986,16918,585034532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92502,RIVERSIDE,CA,PRIMARY,468,714,16543594,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73086,SULPHUR,OK,PRIMARY,3309,6006,97286402,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65707,MILLER,MO,PRIMARY,954,1770,25170637,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28329,CLINTON,NC,PRIMARY,1353,2505,35505191,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31805,CUSSETA,GA,PRIMARY,1217,2263,29402457,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55127,SAINT PAUL,MN,PRIMARY,9099,15796,673842538,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49744,HERRON,MI,PRIMARY,409,745,11054272,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81419,HOTCHKISS,CO,PRIMARY,1645,3028,49313392,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91789,WALNUT,CA,PRIMARY,20233,37363,1007412826,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2790,WESTPORT,MA,PRIMARY,7817,13765,336390312,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64790,WALKER,MO,PRIMARY,353,651,6840515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44814,BERLIN HEIGHTS,OH,PRIMARY,1435,2551,47949650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60043,KENILWORTH,IL,PRIMARY,1285,2371,287126507,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16731,ELDRED,PA,PRIMARY,1348,2396,39293086,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1867,READING,MA,PRIMARY,11926,21499,808066167,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6460,MILFORD,CT,PRIMARY,20773,34070,1088354615,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91108,SAN MARINO,CA,PRIMARY,6783,12520,720791687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5079,VERSHIRE,VT,PRIMARY,315,538,8801718,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49405,CUSTER,MI,PRIMARY,732,1304,19421246,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79007,BORGER,TX,PRIMARY,5878,11067,222350247,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26753,RIDGELEY,WV,PRIMARY,2881,5251,101358267,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19079,SHARON HILL,PA,PRIMARY,4444,7196,129870560,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+17545,MANHEIM,PA,PRIMARY,10663,18860,390865308,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55384,SPRING PARK,MN,PRIMARY,879,1265,35767491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97910,JORDAN VALLEY,OR,PRIMARY,297,526,5005716,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58433,EDGELEY,ND,PRIMARY,551,957,13317429,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59468,POWER,MT,PRIMARY,277,500,7159255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38674,TIPLERSVILLE,MS,PRIMARY,389,741,10667663,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96003,REDDING,CA,PRIMARY,17737,30950,552655890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77516,ANGLETON,TX,PRIMARY,1202,2127,43951334,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4330,AUGUSTA,ME,PRIMARY,11840,19658,369684475,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33957,SANIBEL,FL,PRIMARY,3247,5478,171898751,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75429,COMMERCE,TX,PRIMARY,459,737,13080280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32949,GRANT,FL,PRIMARY,996,1761,41362053,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16033,EVANS CITY,PA,PRIMARY,3101,5421,120748627,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4074,SCARBOROUGH,ME,PRIMARY,8902,15910,465173618,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46703,ANGOLA,IN,PRIMARY,8147,14245,252694968,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38345,HURON,TN,PRIMARY,771,1489,25579967,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13616,CALCIUM,NY,PRIMARY,740,1340,21326989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79336,LEVELLAND,TX,PRIMARY,6896,12753,236728752,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44643,MAGNOLIA,OH,PRIMARY,1532,2693,45076025,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5142,CAVENDISH,VT,PRIMARY,351,601,9548506,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85214,MESA,AZ,PRIMARY,481,806,16020989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49759,MILLERSBURG,MI,PRIMARY,726,1296,13380152,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54665,VIROQUA,WI,PRIMARY,3873,6813,105279780,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87715,CLEVELAND,NM,PRIMARY,251,429,4435153,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59930,REXFORD,MT,PRIMARY,294,533,4300907,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14174,YOUNGSTOWN,NY,PRIMARY,2925,5187,121049602,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +88049,MIMBRES,NM,PRIMARY,412,715,7855276,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41804,BLACKEY,KY,PRIMARY,332,657,11316849,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81041,GRANADA,CO,PRIMARY,282,553,6707537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43964,TORONTO,OH,PRIMARY,4632,8006,138654280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99827,HAINES,AK,PRIMARY,1331,2152,33427226,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95696,VACAVILLE,CA,PRIMARY,1201,2007,45919613,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2663,SOUTH WELLFLEET,MA,PRIMARY,344,539,6973313,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66865,OLPE,KS,PRIMARY,530,991,18127412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47512,BICKNELL,IN,PRIMARY,1645,2942,43810253,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95948,GRIDLEY,CA,PRIMARY,4244,8183,123636608,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+21524,CORRIGANVILLE,MD,PRIMARY,263,461,7257542,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31795,TY TY,GA,PRIMARY,910,1673,23810078,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96818,HONOLULU,HI,PRIMARY,21125,40131,828284548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20751,DEALE,MD,PRIMARY,1171,1944,58633820,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94903,SAN RAFAEL,CA,PRIMARY,14328,23974,794700161,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63939,FAIRDEALING,MO,PRIMARY,567,1090,13109575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70546,JENNINGS,LA,PRIMARY,6766,12538,229974408,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92658,NEWPORT BEACH,CA,PRIMARY,1360,2015,85396822,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61334,LOSTANT,IL,PRIMARY,369,647,11198469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47017,CROSS PLAINS,IN,PRIMARY,297,531,9102921,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32455,PONCE DE LEON,FL,PRIMARY,1719,3181,43532543,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96818,HONOLULU,HI,PRIMARY,21125,40131,828284548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71469,ROBELINE,LA,PRIMARY,1023,2019,35930399,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17876,SHAMOKIN DAM,PA,PRIMARY,819,1381,23144279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12525,GARDINER,NY,PRIMARY,1784,3067,82457095,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59038,HYSHAM,MT,PRIMARY,306,544,7853090,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91394,GRANADA HILLS,CA,PRIMARY,317,493,10565696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98257,LA CONNER,WA,PRIMARY,2127,3632,64153805,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43110,CANAL WINCHESTER,OH,PRIMARY,15631,27043,647524954,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79936,EL PASO,TX,PRIMARY,46203,88503,1519147089,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91214,LA CRESCENTA,CA,PRIMARY,13591,25651,742668478,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58027,ENDERLIN,ND,PRIMARY,656,1158,20030392,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36088,TUSKEGEE INSTITUTE,AL,PRIMARY,1208,1889,22206967,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70071,LUTCHER,LA,PRIMARY,1624,2934,58133715,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91401,VAN NUYS,CA,PRIMARY,17251,28864,626351718,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53590,SUN PRAIRIE,WI,PRIMARY,17475,31182,840767256,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95666,PIONEER,CA,PRIMARY,2400,4179,65818845,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66736,FREDONIA,KS,PRIMARY,1822,3298,48165696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42464,WHITE PLAINS,KY,PRIMARY,728,1433,21907471,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92211,PALM DESERT,CA,PRIMARY,11167,18234,294398455,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97532,MERLIN,OR,PRIMARY,1564,2777,35301586,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20117,MIDDLEBURG,VA,PRIMARY,681,1131,49083280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10464,BRONX,NY,PRIMARY,2131,3381,108256016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46960,MONTEREY,IN,PRIMARY,539,978,13921870,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36205,ANNISTON,AL,PRIMARY,290,482,10716010,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46301,BEVERLY SHORES,IN,PRIMARY,346,565,16263261,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47124,GREENVILLE,IN,PRIMARY,2034,3856,95933773,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73717,ALVA,OK,PRIMARY,2747,4738,73136986,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7071,LYNDHURST,NJ,PRIMARY,10509,17397,493255803,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23181,WEST POINT,VA,PRIMARY,2653,4776,102638627,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31523,BRUNSWICK,GA,PRIMARY,5667,10672,226099291,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78739,AUSTIN,TX,PRIMARY,6676,14154,565514937,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8829,HIGH BRIDGE,NJ,PRIMARY,1857,3328,115616477,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13440,ROME,NY,PRIMARY,18712,31499,589434231,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48173,ROCKWOOD,MI,PRIMARY,6080,11095,268466610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96755,KAPAAU,HI,PRIMARY,1827,3148,54341976,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37144,PETERSBURG,TN,PRIMARY,1408,2674,41962060,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72634,FLIPPIN,AR,PRIMARY,2001,3636,45678269,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45225,CINCINNATI,OH,PRIMARY,3049,5039,47671786,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50849,GREENFIELD,IA,PRIMARY,1347,2386,39405946,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8202,AVALON,NJ,PRIMARY,938,1508,53452847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48173,ROCKWOOD,MI,PRIMARY,6080,11095,268466610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4015,CASCO,ME,PRIMARY,1508,2620,51686158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43764,NEW LEXINGTON,OH,PRIMARY,3485,6156,98763597,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76241,GAINESVILLE,TX,PRIMARY,1033,1786,30991572,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77293,HOUSTON,TX,PRIMARY,350,567,8069094,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17093,SUMMERDALE,PA,PRIMARY,375,612,11420268,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10527,GRANITE SPRINGS,NY,PRIMARY,473,901,48392560,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69024,CULBERTSON,NE,PRIMARY,560,1020,14927250,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41064,MOUNT OLIVET,KY,PRIMARY,806,1505,21730258,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60481,WILMINGTON,IL,PRIMARY,5926,10277,234769230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14853,ITHACA,NY,PRIMARY,302,342,5064499,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2876,SLATERSVILLE,RI,PRIMARY,765,1211,29798698,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4769,PRESQUE ISLE,ME,PRIMARY,4645,7779,140950065,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7055,PASSAIC,NJ,PRIMARY,28463,51244,778397023,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92570,PERRIS,CA,PRIMARY,16762,33297,488394932,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77865,MARQUEZ,TX,PRIMARY,702,1311,21332997,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87420,SHIPROCK,NM,PRIMARY,3099,5236,93508324,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50648,JESUP,IA,PRIMARY,1883,3476,69459329,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99710,FAIRBANKS,AK,PRIMARY,875,1451,40361564,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29378,UNA,SC,PRIMARY,528,975,9403657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48730,EAST TAWAS,MI,PRIMARY,2295,3876,51005023,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36373,SHORTERVILLE,AL,PRIMARY,258,472,5785190,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31789,SUMNER,GA,PRIMARY,626,1189,18145167,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39752,MATHISTON,MS,PRIMARY,955,1857,28716799,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38337,GADSDEN,TN,PRIMARY,617,1139,17603973,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28746,LAKE LURE,NC,PRIMARY,1071,1864,23427879,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83334,HANSEN,ID,PRIMARY,781,1501,20265526,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12561,NEW PALTZ,NY,PRIMARY,6999,11547,302822801,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39202,JACKSON,MS,PRIMARY,3284,5162,103618280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23663,HAMPTON,VA,PRIMARY,6269,10559,173186203,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77531,CLUTE,TX,PRIMARY,6417,11888,239538094,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6382,UNCASVILLE,CT,PRIMARY,5357,8952,204331395,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94064,REDWOOD CITY,CA,PRIMARY,775,1200,35609802,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25301,CHARLESTON,WV,PRIMARY,1341,1800,34568020,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77639,ORANGEFIELD,TX,PRIMARY,327,583,11019630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21220,MIDDLE RIVER,MD,PRIMARY,18828,31809,727543155,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75146,LANCASTER,TX,PRIMARY,8347,14595,287543216,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80480,WALDEN,CO,PRIMARY,578,985,14818961,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24073,CHRISTIANSBURG,VA,PRIMARY,13101,23032,479491366,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94519,CONCORD,CA,PRIMARY,8562,14804,379103572,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75103,CANTON,TX,PRIMARY,5479,10257,179207421,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60827,RIVERDALE,IL,PRIMARY,11337,19042,279694388,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28905,MARBLE,NC,PRIMARY,1260,2318,29569881,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92115,SAN DIEGO,CA,PRIMARY,22503,36221,751378269,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68957,LAWRENCE,NE,PRIMARY,308,573,6934357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18101,ALLENTOWN,PA,PRIMARY,1347,2150,20693943,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30066,MARIETTA,GA,PRIMARY,25212,46080,1357866092,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31410,SAVANNAH,GA,PRIMARY,11344,19812,566547068,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97391,TOLEDO,OR,PRIMARY,2247,4019,69303687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87823,LEMITAR,NM,PRIMARY,300,538,6855610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64856,PINEVILLE,MO,PRIMARY,1609,2929,45969820,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77227,HOUSTON,TX,PRIMARY,653,1169,148401980,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18931,LAHASKA,PA,PRIMARY,261,400,10852299,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93618,DINUBA,CA,PRIMARY,9673,19322,254938092,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49040,COLON,MI,PRIMARY,1484,2644,38842291,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50050,CHURDAN,IA,PRIMARY,330,592,7540289,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31022,DUDLEY,GA,PRIMARY,675,1271,23032195,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93004,VENTURA,CA,PRIMARY,12882,23167,609355820,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33921,BOCA GRANDE,FL,PRIMARY,846,1392,124128680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33327,FORT LAUDERDALE,FL,PRIMARY,8150,17177,724191841,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84315,HOOPER,UT,PRIMARY,2989,5926,136607626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49130,UNION,MI,PRIMARY,904,1590,33750696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89135,LAS VEGAS,NV,PRIMARY,10996,19020,768482880,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95605,WEST SACRAMENTO,CA,PRIMARY,5155,9280,153053927,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77388,SPRING,TX,PRIMARY,17956,33840,1035769149,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92393,VICTORVILLE,CA,PRIMARY,1503,2612,51957551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29518,CADES,SC,PRIMARY,627,1145,15357461,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25529,JULIAN,WV,PRIMARY,330,640,10754869,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92114,SAN DIEGO,CA,PRIMARY,27373,50254,875844144,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30413,BARTOW,GA,PRIMARY,727,1344,19137594,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44129,CLEVELAND,OH,PRIMARY,15246,24474,474355918,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31082,SANDERSVILLE,GA,PRIMARY,4625,8384,137036852,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48652,RHODES,MI,PRIMARY,791,1437,23753720,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24527,BLAIRS,VA,PRIMARY,1448,2606,43651777,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21783,SMITHSBURG,MD,PRIMARY,4105,7642,186231945,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55419,MINNEAPOLIS,MN,PRIMARY,13238,22317,900807593,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27878,SHARPSBURG,NC,PRIMARY,1103,1977,25901168,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46989,UPLAND,IN,PRIMARY,1685,3128,56729502,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10107,NEW YORK,NY,PRIMARY,474,598,102139661,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55109,SAINT PAUL,MN,PRIMARY,16070,26675,609894829,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2809,BRISTOL,RI,PRIMARY,9965,16829,456496819,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50650,LAMONT,IA,PRIMARY,340,602,8241625,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27959,NAGS HEAD,NC,PRIMARY,1523,2496,44156270,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60033,HARVARD,IL,PRIMARY,6498,11934,219079842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77830,ANDERSON,TX,PRIMARY,1216,2140,40569745,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65737,REEDS SPRING,MO,PRIMARY,3708,6567,85648448,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23005,ASHLAND,VA,PRIMARY,7206,12231,311338934,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30003,NORCROSS,GA,PRIMARY,323,525,7721994,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50541,GILMORE CITY,IA,PRIMARY,401,723,10641243,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91367,WOODLAND HILLS,CA,PRIMARY,18718,31168,1030182882,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33913,FORT MYERS,FL,PRIMARY,6646,12218,341174549,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31757,THOMASVILLE,GA,PRIMARY,4025,7537,140595679,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64748,GOLDEN CITY,MO,PRIMARY,659,1169,14676481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72112,NEWPORT,AR,PRIMARY,3774,6715,93621747,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84651,PAYSON,UT,PRIMARY,8458,17332,315168222,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73505,LAWTON,OK,PRIMARY,20404,37053,623591583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27705,DURHAM,NC,PRIMARY,18938,31340,804835237,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20914,SILVER SPRING,MD,PRIMARY,354,541,13866212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37036,CHARLOTTE,TN,PRIMARY,2459,4605,79487171,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11428,QUEENS VILLAGE,NY,PRIMARY,9216,16017,341921071,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80728,FLEMING,CO,PRIMARY,389,711,10649677,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22920,AFTON,VA,PRIMARY,1903,3352,75656244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77381,SPRING,TX,PRIMARY,16411,31122,1672426589,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85911,CIBECUE,AZ,PRIMARY,428,849,7184367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44035,ELYRIA,OH,PRIMARY,30760,50414,913037894,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87499,FARMINGTON,NM,PRIMARY,13305,24735,341621613,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22201,ARLINGTON,VA,PRIMARY,20312,26353,1570573667,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93626,FRIANT,CA,PRIMARY,667,1202,39735058,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61931,HUMBOLDT,IL,PRIMARY,524,968,14578315,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63939,FAIRDEALING,MO,PRIMARY,567,1090,13109575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34270,TALLEVAST,FL,PRIMARY,427,666,12690881,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80524,FORT COLLINS,CO,PRIMARY,14641,24832,555037289,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20636,HOLLYWOOD,MD,PRIMARY,4560,8431,254490988,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29451,ISLE OF PALMS,SC,PRIMARY,2203,3790,133836057,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47994,WINGATE,IN,PRIMARY,290,524,9709726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91763,MONTCLAIR,CA,PRIMARY,14454,27063,453244725,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14810,BATH,NY,PRIMARY,5367,9174,151990420,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77990,TIVOLI,TX,PRIMARY,332,613,9543106,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61281,SHERRARD,IL,PRIMARY,1230,2317,56932197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33177,MIAMI,FL,PRIMARY,23647,41930,693480552,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33173,MIAMI,FL,PRIMARY,16018,26714,628049188,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75937,CHIRENO,TX,PRIMARY,472,891,12580055,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78711,AUSTIN,TX,PRIMARY,266,396,12410388,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78702,AUSTIN,TX,PRIMARY,9463,14748,243264352,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98206,EVERETT,WA,PRIMARY,1288,1947,49607523,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64483,ROSENDALE,MO,PRIMARY,279,558,9784501,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47108,CAMPBELLSBURG,IN,PRIMARY,1006,1885,26784300,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62573,WARRENSBURG,IL,PRIMARY,746,1374,27530398,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26679,MOUNT NEBO,WV,PRIMARY,797,1518,27096101,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70033,METAIRIE,LA,PRIMARY,254,391,6492783,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12464,PHOENICIA,NY,PRIMARY,520,816,13164275,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96148,TAHOE VISTA,CA,PRIMARY,387,632,10908505,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4106,SOUTH PORTLAND,ME,PRIMARY,12850,20593,475989261,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38450,COLLINWOOD,TN,PRIMARY,1069,2055,28218645,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62292,TILDEN,IL,PRIMARY,407,703,9172253,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61546,MANITO,IL,PRIMARY,2038,3744,68886153,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22734,REMINGTON,VA,PRIMARY,1402,2573,60250049,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49913,CALUMET,MI,PRIMARY,3231,5571,78206758,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97526,GRANTS PASS,OR,PRIMARY,13798,24106,339304233,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1001,AGAWAM,MA,PRIMARY,8596,14021,337735143,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39122,NATCHEZ,MS,PRIMARY,354,597,10888991,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98241,DARRINGTON,WA,PRIMARY,1041,1835,33476575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18621,HUNLOCK CREEK,PA,PRIMARY,2431,4390,82057175,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28618,DEEP GAP,NC,PRIMARY,1010,1837,29327282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19053,FEASTERVILLE TREVOSE,PA,PRIMARY,14193,24238,649468583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79110,AMARILLO,TX,PRIMARY,8483,15253,283911288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36740,FORKLAND,AL,PRIMARY,526,943,12631738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15083,SUTERSVILLE,PA,PRIMARY,494,844,14378082,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68739,HARTINGTON,NE,PRIMARY,1410,2596,37707057,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45619,CHESAPEAKE,OH,PRIMARY,3547,6472,110189433,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14219,BUFFALO,NY,PRIMARY,6011,9751,183028157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99523,ANCHORAGE,AK,PRIMARY,1199,1856,51216649,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8067,PEDRICKTOWN,NJ,PRIMARY,821,1451,35607626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79351,ODONNELL,TX,PRIMARY,573,1076,13006690,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58107,FARGO,ND,PRIMARY,279,401,8551964,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92865,ORANGE,CA,PRIMARY,8810,15604,408519540,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80482,WINTER PARK,CO,PRIMARY,1186,1767,42662858,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85006,PHOENIX,AZ,PRIMARY,8660,15463,231696954,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92116,SAN DIEGO,CA,PRIMARY,17446,24400,719960889,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46766,LIBERTY CENTER,IN,PRIMARY,261,473,8706611,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95487,VINEBURG,CA,PRIMARY,276,470,8891097,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23234,RICHMOND,VA,PRIMARY,18687,31921,562632911,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75778,MURCHISON,TX,PRIMARY,1254,2311,34676291,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96726,HONAUNAU,HI,PRIMARY,640,1088,14513183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68862,ORD,NE,PRIMARY,1390,2496,36990100,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81226,FLORENCE,CO,PRIMARY,2396,4384,69073349,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75224,DALLAS,TX,PRIMARY,12840,23717,327341629,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13808,MORRIS,NY,PRIMARY,790,1411,23193396,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40068,SMITHFIELD,KY,PRIMARY,1050,1973,42721880,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60443,MATTESON,IL,PRIMARY,10013,16892,398674460,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76630,BRUCEVILLE,TX,PRIMARY,742,1397,29530450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37410,CHATTANOOGA,TN,PRIMARY,1387,2314,21960081,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1035,HADLEY,MA,PRIMARY,2617,4327,97134728,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67954,ROLLA,KS,PRIMARY,339,638,9587901,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53202,MILWAUKEE,WI,PRIMARY,13057,15593,651034663,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93223,FARMERSVILLE,CA,PRIMARY,3332,6716,79055086,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35149,SYCAMORE,AL,PRIMARY,307,576,7263074,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44643,MAGNOLIA,OH,PRIMARY,1532,2693,45076025,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15148,WILMERDING,PA,PRIMARY,1248,1898,26440710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53518,BLUE RIVER,WI,PRIMARY,639,1134,16364196,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35544,BEAVERTON,AL,PRIMARY,281,532,7195412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48509,BURTON,MI,PRIMARY,4853,8434,137064063,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17845,MILLMONT,PA,PRIMARY,959,1790,21610107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51025,HOLSTEIN,IA,PRIMARY,979,1759,27681993,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60952,MELVIN,IL,PRIMARY,301,574,8869339,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12871,SCHUYLERVILLE,NY,PRIMARY,1912,3391,70321403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25507,CEREDO,WV,PRIMARY,660,1074,16947203,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17872,SHAMOKIN,PA,PRIMARY,4540,7558,110316191,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56115,BALATON,MN,PRIMARY,671,1176,19547722,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56560,MOORHEAD,MN,PRIMARY,16801,28610,578340681,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30417,CLAXTON,GA,PRIMARY,3746,6867,102717818,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68361,GENEVA,NE,PRIMARY,1224,2206,35898742,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60619,CHICAGO,IL,PRIMARY,30282,48410,780108829,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76627,BLUM,TX,PRIMARY,659,1261,22306849,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64446,FAIRFAX,MO,PRIMARY,532,968,13707849,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35565,HALEYVILLE,AL,PRIMARY,5214,9940,139423643,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85637,SONOITA,AZ,PRIMARY,687,1225,26050942,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27622,RALEIGH,NC,PRIMARY,322,505,11320081,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47038,PATRIOT,IN,PRIMARY,508,923,14597846,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24569,LONG ISLAND,VA,PRIMARY,420,782,12002292,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+4949,LIBERTY,ME,PRIMARY,450,824,11317144,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17024,ELLIOTTSBURG,PA,PRIMARY,897,1648,32568610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35453,COTTONDALE,AL,PRIMARY,4575,8490,158892811,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64016,BUCKNER,MO,PRIMARY,2047,3796,77052694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48081,SAINT CLAIR SHORES,MI,PRIMARY,11042,18281,405089294,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61334,LOSTANT,IL,PRIMARY,369,647,11198469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15436,FAIRCHANCE,PA,PRIMARY,1267,2180,33272489,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60507,AURORA,IL,PRIMARY,878,1505,22654746,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45629,FRANKLIN FURNACE,OH,PRIMARY,1270,2369,42009646,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52064,MILES,IA,PRIMARY,386,738,12309161,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92254,MECCA,CA,PRIMARY,5189,11107,96913687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5836,EAST HARDWICK,VT,PRIMARY,486,880,13556320,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33156,MIAMI,FL,PRIMARY,14785,26260,1409543303,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39326,DALEVILLE,MS,PRIMARY,322,571,8463548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60074,PALATINE,IL,PRIMARY,18681,33175,815369950,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83832,GENESEE,ID,PRIMARY,688,1314,25534920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53144,KENOSHA,WI,PRIMARY,11933,20705,477473327,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61550,MORTON,IL,PRIMARY,8765,15680,439743630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49322,CORAL,MI,PRIMARY,467,889,13648922,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36863,LANETT,AL,PRIMARY,5246,9401,131928321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89407,FALLON,NV,PRIMARY,966,1609,28995773,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61014,CHADWICK,IL,PRIMARY,600,1081,17994066,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5069,SOUTH RYEGATE,VT,PRIMARY,335,581,9218551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32332,GRETNA,FL,PRIMARY,954,1735,19600826,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11432,JAMAICA,NY,PRIMARY,24626,41968,815001669,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38650,MYRTLE,MS,PRIMARY,1498,2838,39922440,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70596,LAFAYETTE,LA,PRIMARY,554,893,21752695,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94305,STANFORD,CA,PRIMARY,3657,5084,258000176,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63349,HAWK POINT,MO,PRIMARY,869,1642,30398195,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41080,PETERSBURG,KY,PRIMARY,866,1565,33353316,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23417,ONANCOCK,VA,PRIMARY,1730,2899,46157669,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74864,PRAGUE,OK,PRIMARY,2049,3896,62764134,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69169,WALLACE,NE,PRIMARY,268,517,7566728,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44651,MECHANICSTOWN,OH,PRIMARY,361,664,8956470,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60403,CREST HILL,IL,PRIMARY,8302,13949,284179527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12946,LAKE PLACID,NY,PRIMARY,2893,4566,92540183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+46166,PARAGON,IN,PRIMARY,1042,1961,31695699,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63544,GREEN CASTLE,MO,PRIMARY,339,640,7140159,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46733,DECATUR,IN,PRIMARY,8894,15973,274309636,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39342,MARION,MS,PRIMARY,751,1300,23864412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29108,NEWBERRY,SC,PRIMARY,8671,15490,229507254,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92315,BIG BEAR LAKE,CA,PRIMARY,3747,6550,101626614,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98015,BELLEVUE,WA,PRIMARY,571,868,33095427,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17967,RINGTOWN,PA,PRIMARY,1214,2146,39500842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84774,TOQUERVILLE,UT,PRIMARY,537,1053,16028044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96094,WEED,CA,PRIMARY,2674,4766,72748977,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73738,GARBER,OK,PRIMARY,531,967,15313682,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48074,SMITHS CREEK,MI,PRIMARY,4322,7771,147486251,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19475,SPRING CITY,PA,PRIMARY,5109,8983,273158219,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78801,UVALDE,TX,PRIMARY,7898,14841,195993185,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45150,MILFORD,OH,PRIMARY,15545,27012,646795539,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61378,WEST BROOKLYN,IL,PRIMARY,259,466,7152076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28392,TAR HEEL,NC,PRIMARY,762,1422,21138637,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15957,STRONGSTOWN,PA,PRIMARY,258,455,7607813,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85756,TUCSON,AZ,PRIMARY,7604,14665,238131456,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53065,OAKFIELD,WI,PRIMARY,1049,1933,40506986,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57709,RAPID CITY,SD,PRIMARY,1679,2528,47339015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68716,BEEMER,NE,PRIMARY,455,815,11540833,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85086,PHOENIX,AZ,PRIMARY,14825,29056,850418427,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95518,ARCATA,CA,PRIMARY,965,1402,21280014,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73932,BEAVER,OK,PRIMARY,940,1800,31409085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39564,OCEAN SPRINGS,MS,PRIMARY,15851,28673,644589818,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37148,PORTLAND,TN,PRIMARY,9620,18152,316659098,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40903,ARTEMUS,KY,PRIMARY,340,665,7398555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83830,FERNWOOD,ID,PRIMARY,280,526,6637168,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7620,ALPINE,NJ,PRIMARY,970,1696,193066840,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38483,SUMMERTOWN,TN,PRIMARY,2111,4137,61501726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97867,NORTH POWDER,OR,PRIMARY,325,613,7302914,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28504,KINSTON,NC,PRIMARY,9070,16316,287556016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41146,HITCHINS,KY,PRIMARY,274,513,6709382,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19341,EXTON,PA,PRIMARY,8813,15505,611500552,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15676,PLEASANT UNITY,PA,PRIMARY,286,506,9873123,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+42367,POWDERLY,KY,PRIMARY,439,826,9949922,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54481,STEVENS POINT,WI,PRIMARY,16425,27473,563847441,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11717,BRENTWOOD,NY,PRIMARY,28265,48090,850200013,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1037,HARDWICK,MA,PRIMARY,388,714,17327614,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43447,MILLBURY,OH,PRIMARY,1819,3111,68923846,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99803,JUNEAU,AK,PRIMARY,2223,3545,94757025,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79235,FLOYDADA,TX,PRIMARY,1646,3085,36797398,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74501,MCALESTER,OK,PRIMARY,10505,18926,346429620,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11724,COLD SPRING HARBOR,NY,PRIMARY,1803,3232,271529797,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23221,RICHMOND,VA,PRIMARY,7764,10957,456885469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20905,SILVER SPRING,MD,PRIMARY,9042,16516,592923713,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29464,MOUNT PLEASANT,SC,PRIMARY,22010,36456,1094083177,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61764,PONTIAC,IL,PRIMARY,6262,10677,201511259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49426,HUDSONVILLE,MI,PRIMARY,14860,28748,657602868,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20710,BLADENSBURG,MD,PRIMARY,4030,6493,122820042,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17961,ORWIGSBURG,PA,PRIMARY,3430,5985,147532725,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40157,PAYNEVILLE,KY,PRIMARY,445,825,13008680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32937,SATELLITE BEACH,FL,PRIMARY,12963,21943,573425547,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80833,RUSH,CO,PRIMARY,275,521,6971785,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37724,CUMBERLAND GAP,TN,PRIMARY,863,1614,24587713,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14820,CAMERON MILLS,NY,PRIMARY,294,543,9261359,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49202,JACKSON,MI,PRIMARY,8750,14274,207103338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97324,ALSEA,OR,PRIMARY,481,844,13463500,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76302,WICHITA FALLS,TX,PRIMARY,5102,8940,163036221,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34233,SARASOTA,FL,PRIMARY,8728,14328,274182890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76645,HILLSBORO,TX,PRIMARY,4547,8425,122502546,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11730,EAST ISLIP,NY,PRIMARY,7031,12627,416491794,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57471,ROSCOE,SD,PRIMARY,320,573,6051349,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14477,KENT,NY,PRIMARY,803,1443,25182121,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36027,EUFAULA,AL,PRIMARY,6494,11678,178675618,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15486,VANDERBILT,PA,PRIMARY,1125,2027,34013004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35810,HUNTSVILLE,AL,PRIMARY,14235,23653,372751691,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80030,WESTMINSTER,CO,PRIMARY,6660,11249,205713219,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80615,EATON,CO,PRIMARY,3196,6149,131390991,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1901,LYNN,MA,PRIMARY,830,1155,22363981,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11802,HICKSVILLE,NY,PRIMARY,411,620,16176164,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48877,RIVERDALE,MI,PRIMARY,994,1862,26716044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21224,BALTIMORE,MD,PRIMARY,22061,33150,876200844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42140,GAMALIEL,KY,PRIMARY,505,948,10871559,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67101,MAIZE,KS,PRIMARY,1555,2961,66297614,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38011,BRIGHTON,TN,PRIMARY,4069,7861,151924852,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37934,KNOXVILLE,TN,PRIMARY,11134,21174,681700579,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71749,JUNCTION CITY,AR,PRIMARY,1152,2213,36329410,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79424,LUBBOCK,TX,PRIMARY,17537,32403,917331139,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28401,WILMINGTON,NC,PRIMARY,9214,14224,221277005,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59018,CLYDE PARK,MT,PRIMARY,296,524,7916287,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42160,PARK CITY,KY,PRIMARY,825,1574,21279315,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39564,OCEAN SPRINGS,MS,PRIMARY,15851,28673,644589818,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93062,SIMI VALLEY,CA,PRIMARY,615,962,26928183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29728,PAGELAND,SC,PRIMARY,3823,7007,93366796,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65284,STURGEON,MO,PRIMARY,1020,1882,35803834,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54859,MINONG,WI,PRIMARY,943,1706,23769015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18832,MONROETON,PA,PRIMARY,817,1411,20753238,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68106,OMAHA,NE,PRIMARY,10521,15966,353634802,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3741,CANAAN,NH,PRIMARY,2056,3514,78885778,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31783,REBECCA,GA,PRIMARY,445,841,12182704,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32302,TALLAHASSEE,FL,PRIMARY,584,899,28539963,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64493,TURNEY,MO,PRIMARY,254,472,8658158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2808,BRADFORD,RI,PRIMARY,1153,2043,41610566,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55325,DASSEL,MN,PRIMARY,2019,3752,74636255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26201,BUCKHANNON,WV,PRIMARY,7871,14223,238611381,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54213,FORESTVILLE,WI,PRIMARY,660,1230,21581543,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62926,DONGOLA,IL,PRIMARY,952,1748,24532517,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20851,ROCKVILLE,MD,PRIMARY,7019,11459,287309289,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80047,AURORA,CO,PRIMARY,876,1362,29554617,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79248,PADUCAH,TX,PRIMARY,657,1199,14323236,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3254,MOULTONBOROUGH,NH,PRIMARY,1816,3190,66627549,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97361,MONMOUTH,OR,PRIMARY,4168,7320,133670416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47948,GOODLAND,IN,PRIMARY,674,1164,17962691,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4281,SOUTH PARIS,ME,PRIMARY,2174,3737,62067583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61614,PEORIA,IL,PRIMARY,14848,24235,663839867,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13037,CHITTENANGO,NY,PRIMARY,4286,7640,165223974,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28213,CHARLOTTE,NC,PRIMARY,15011,25793,481719203,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17538,LANDISVILLE,PA,PRIMARY,3271,5913,153033333,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50641,HAZLETON,IA,PRIMARY,637,1245,15331941,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85204,MESA,AZ,PRIMARY,23251,42421,728526805,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40771,WOODBINE,KY,PRIMARY,615,1160,11736119,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98579,ROCHESTER,WA,PRIMARY,5410,9937,202874004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48122,MELVINDALE,MI,PRIMARY,4831,8081,127084375,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95630,FOLSOM,CA,PRIMARY,28672,53688,1899333607,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27501,ANGIER,NC,PRIMARY,7058,13444,246564481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48348,CLARKSTON,MI,PRIMARY,10702,20501,735123611,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10012,NEW YORK,NY,PRIMARY,13415,17738,1529644615,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63146,SAINT LOUIS,MO,PRIMARY,15396,24912,689003338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54205,CASCO,WI,PRIMARY,1035,1910,36998005,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21009,ABINGDON,MD,PRIMARY,14151,25308,779581367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2643,EAST ORLEANS,MA,PRIMARY,661,1081,26076809,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40010,BUCKNER,KY,PRIMARY,297,607,16781891,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61080,SOUTH BELOIT,IL,PRIMARY,4857,8878,193089401,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43837,PORT WASHINGTON,OH,PRIMARY,842,1506,23766200,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1850,LOWELL,MA,PRIMARY,6713,11001,222920009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4976,SKOWHEGAN,ME,PRIMARY,4356,7550,135684772,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95641,ISLETON,CA,PRIMARY,879,1486,23897009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45205,CINCINNATI,OH,PRIMARY,7065,11782,161986979,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30711,CRANDALL,GA,PRIMARY,1404,2690,39044255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47142,MAUCKPORT,IN,PRIMARY,423,749,11610685,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91913,CHULA VISTA,CA,PRIMARY,15720,31558,850730380,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24599,WINGINA,VA,PRIMARY,252,437,7197428,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50830,AFTON,IA,PRIMARY,751,1402,21968576,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54562,THREE LAKES,WI,PRIMARY,1067,1804,25919880,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51542,HONEY CREEK,IA,PRIMARY,466,874,22186723,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24085,EAGLE ROCK,VA,PRIMARY,903,1648,30684275,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16510,ERIE,PA,PRIMARY,12020,21150,404957360,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56320,COLD SPRING,MN,PRIMARY,3770,6910,147883240,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17540,LEOLA,PA,PRIMARY,4693,8322,153303595,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12956,MINEVILLE,NY,PRIMARY,529,913,15438631,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12076,GILBOA,NY,PRIMARY,579,1012,16755100,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59011,BIG TIMBER,MT,PRIMARY,1540,2695,40805157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2863,CENTRAL FALLS,RI,PRIMARY,7451,12807,155524266,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29030,CAMERON,SC,PRIMARY,869,1521,26674338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89128,LAS VEGAS,NV,PRIMARY,17206,28310,691134906,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27540,HOLLY SPRINGS,NC,PRIMARY,11397,23508,741818553,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23059,GLEN ALLEN,VA,PRIMARY,13278,25802,1059404731,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7304,JERSEY CITY,NJ,PRIMARY,18986,31180,608970277,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65325,COLE CAMP,MO,PRIMARY,1383,2531,34221073,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84782,VEYO,UT,PRIMARY,313,584,8745723,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75225,DALLAS,TX,PRIMARY,10943,19408,1646556171,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96027,ETNA,CA,PRIMARY,904,1656,23105894,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76017,ARLINGTON,TX,PRIMARY,20979,37503,982754586,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13090,LIVERPOOL,NY,PRIMARY,15076,26337,669746981,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5669,ROXBURY,VT,PRIMARY,268,462,6659703,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30528,CLEVELAND,GA,PRIMARY,8912,16648,256094867,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72940,HUNTINGTON,AR,PRIMARY,958,1848,29321227,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61204,ROCK ISLAND,IL,PRIMARY,291,410,7912628,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7303,JERSEY CITY,NJ,PRIMARY,1115,1707,37296479,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98650,TROUT LAKE,WA,PRIMARY,443,796,14616106,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62901,CARBONDALE,IL,PRIMARY,8676,13025,215833434,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95562,RIO DELL,CA,PRIMARY,1202,2147,28716384,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14616,ROCHESTER,NY,PRIMARY,14962,24464,446717959,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64421,AMAZONIA,MO,PRIMARY,424,779,13151840,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78522,BROWNSVILLE,TX,PRIMARY,437,780,8320614,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55337,BURNSVILLE,MN,PRIMARY,22600,38193,997781459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27939,GRANDY,NC,PRIMARY,1167,2059,30526226,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92423,SAN BERNARDINO,CA,PRIMARY,615,1021,25765924,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79907,EL PASO,TX,PRIMARY,23443,43435,476840996,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94708,BERKELEY,CA,PRIMARY,6100,10062,456099830,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62314,BAYLIS,IL,PRIMARY,278,514,7225452,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45323,ENON,OH,PRIMARY,2734,4727,94452916,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95008,CAMPBELL,CA,PRIMARY,21629,36168,1394476952,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21557,RAWLINGS,MD,PRIMARY,842,1524,25768478,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11746,HUNTINGTON STATION,NY,PRIMARY,32197,56971,1986757091,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36032,FORT DEPOSIT,AL,PRIMARY,1206,2230,27095732,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20108,MANASSAS,VA,PRIMARY,973,1594,38004138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74435,GORE,OK,PRIMARY,1297,2448,36616210,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72766,SPRINGDALE,AR,PRIMARY,674,1201,23452182,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65810,SPRINGFIELD,MO,PRIMARY,9759,17974,458849836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93725,FRESNO,CA,PRIMARY,8415,16278,213137120,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12155,SCHENEVUS,NY,PRIMARY,782,1378,22636112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54984,WILD ROSE,WI,PRIMARY,1579,2719,40442597,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29565,LATTA,SC,PRIMARY,2861,5245,70550097,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83655,NEW PLYMOUTH,ID,PRIMARY,1778,3448,51204695,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78003,BANDERA,TX,PRIMARY,4052,7180,113119211,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95490,WILLITS,CA,PRIMARY,5400,9434,145592603,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14550,SILVER SPRINGS,NY,PRIMARY,794,1369,25310025,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25063,DUCK,WV,PRIMARY,533,1010,13857726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70033,METAIRIE,LA,PRIMARY,254,391,6492783,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66063,OLATHE,KS,PRIMARY,333,552,11467971,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47142,MAUCKPORT,IN,PRIMARY,423,749,11610685,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49544,GRAND RAPIDS,MI,PRIMARY,4518,7557,148235773,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53583,SAUK CITY,WI,PRIMARY,2906,5021,111960066,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15214,PITTSBURGH,PA,PRIMARY,6767,10700,203171183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37656,FALL BRANCH,TN,PRIMARY,1672,3062,51649956,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34142,IMMOKALEE,FL,PRIMARY,5475,10418,112722244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39563,MOSS POINT,MS,PRIMARY,5897,10216,162668146,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60661,CHICAGO,IL,PRIMARY,4459,5726,369568564,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52003,DUBUQUE,IA,PRIMARY,6880,12170,313811358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44890,WILLARD,OH,PRIMARY,5298,9165,153785787,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22546,RUTHER GLEN,VA,PRIMARY,6997,12545,292216115,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92823,BREA,CA,PRIMARY,1557,3045,105550592,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71021,CULLEN,LA,PRIMARY,432,755,8115762,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56026,ELLENDALE,MN,PRIMARY,959,1719,31827756,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2126,MATTAPAN,MA,PRIMARY,12276,19266,399176436,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33308,FORT LAUDERDALE,FL,PRIMARY,16094,23333,787866464,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14548,SHORTSVILLE,NY,PRIMARY,1925,3340,64636212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17097,WICONISCO,PA,PRIMARY,395,689,11037517,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17864,PORT TREVORTON,PA,PRIMARY,992,1809,23000164,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69152,MULLEN,NE,PRIMARY,412,749,8961104,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7503,PATERSON,NJ,PRIMARY,8454,14649,213985183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56368,RICHMOND,MN,PRIMARY,2010,3685,71040061,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56728,HALLOCK,MN,PRIMARY,723,1247,18369573,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7723,DEAL,NJ,PRIMARY,535,875,38425444,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51250,SIOUX CENTER,IA,PRIMARY,3078,5875,118901813,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6379,PAWCATUCK,CT,PRIMARY,4464,7620,205807466,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25504,BARBOURSVILLE,WV,PRIMARY,5542,9894,202926890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79029,DUMAS,TX,PRIMARY,6753,12948,231494323,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1756,MENDON,MA,PRIMARY,2753,5234,188037787,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38367,RAMER,TN,PRIMARY,1054,2011,32698860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24015,ROANOKE,VA,PRIMARY,7911,12643,285429315,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60921,CHATSWORTH,IL,PRIMARY,731,1258,18053181,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70884,BATON ROUGE,LA,PRIMARY,493,805,33251831,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79045,HEREFORD,TX,PRIMARY,7886,15188,206430730,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47850,FARMERSBURG,IN,PRIMARY,1173,2120,37762044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80653,WELDONA,CO,PRIMARY,340,626,10585279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20692,VALLEY LEE,MD,PRIMARY,589,1054,28617466,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53039,JUNEAU,WI,PRIMARY,2063,3685,76517600,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45230,CINCINNATI,OH,PRIMARY,14079,23360,674299362,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31629,DIXIE,GA,PRIMARY,370,683,8799045,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27865,PINETOWN,NC,PRIMARY,824,1543,25142734,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66007,BASEHOR,KS,PRIMARY,2704,4989,129081698,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25671,DINGESS,WV,PRIMARY,415,848,12544551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19344,HONEY BROOK,PA,PRIMARY,5555,9962,212867444,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51640,HAMBURG,IA,PRIMARY,760,1339,22284123,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32925,PATRICK AFB,FL,PRIMARY,524,1019,20001344,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21864,STOCKTON,MD,PRIMARY,293,522,9373767,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33197,MIAMI,FL,PRIMARY,628,1028,15702441,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97032,HUBBARD,OR,PRIMARY,2003,3828,67957870,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90056,LOS ANGELES,CA,PRIMARY,4421,7060,232623389,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30513,BLUE RIDGE,GA,PRIMARY,4412,8079,102776437,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79225,CHILLICOTHE,TX,PRIMARY,424,791,11983029,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21014,BEL AIR,MD,PRIMARY,17433,31074,936065910,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24599,WINGINA,VA,PRIMARY,252,437,7197428,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68812,AMHERST,NE,PRIMARY,388,702,10500665,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2460,NEWTONVILLE,MA,PRIMARY,4687,7764,361543245,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56560,MOORHEAD,MN,PRIMARY,16801,28610,578340681,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+55017,DALBO,MN,PRIMARY,319,580,12207218,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8067,PEDRICKTOWN,NJ,PRIMARY,821,1451,35607626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32413,PANAMA CITY BEACH,FL,PRIMARY,6421,10441,195562587,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49762,NAUBINWAY,MI,PRIMARY,310,543,5863212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66852,GRIDLEY,KS,PRIMARY,294,557,9551913,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85552,THATCHER,AZ,PRIMARY,2354,4487,90479970,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77640,PORT ARTHUR,TX,PRIMARY,7011,12079,189750941,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28016,BESSEMER CITY,NC,PRIMARY,5559,10243,164642296,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84513,CASTLE DALE,UT,PRIMARY,639,1281,24539679,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32170,NEW SMYRNA BEACH,FL,PRIMARY,805,1221,22539279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63857,KENNETT,MO,PRIMARY,4915,9013,133612279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70444,KENTWOOD,LA,PRIMARY,3899,7259,116352404,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23829,CAPRON,VA,PRIMARY,573,1030,17889491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77238,HOUSTON,TX,PRIMARY,301,479,6557203,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73050,LANGSTON,OK,PRIMARY,290,465,5214735,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28128,NORWOOD,NC,PRIMARY,3235,5964,96140145,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8075,RIVERSIDE,NJ,PRIMARY,14165,24711,642668487,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45209,CINCINNATI,OH,PRIMARY,5543,7341,234157208,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67208,WICHITA,KS,PRIMARY,8799,14041,286314548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6511,NEW HAVEN,CT,PRIMARY,21024,31415,762115986,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71327,COTTONPORT,LA,PRIMARY,1549,2922,45276535,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79325,FARWELL,TX,PRIMARY,927,1790,26169992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22604,WINCHESTER,VA,PRIMARY,958,1534,34379854,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24557,GRETNA,VA,PRIMARY,3367,6054,91103640,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83014,WILSON,WY,PRIMARY,1496,2334,96854336,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10025,NEW YORK,NY,PRIMARY,47778,70190,1675737238,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83655,NEW PLYMOUTH,ID,PRIMARY,1778,3448,51204695,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14231,BUFFALO,NY,PRIMARY,267,399,9522851,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28451,LELAND,NC,PRIMARY,11397,20410,404692253,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52653,WAPELLO,IA,PRIMARY,1594,2893,51701946,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32148,INTERLACHEN,FL,PRIMARY,4019,7169,92768490,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53190,WHITEWATER,WI,PRIMARY,6267,10569,201835644,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98232,BOW,WA,PRIMARY,1955,3469,77454557,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22728,MIDLAND,VA,PRIMARY,1389,2546,68995450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63965,VAN BUREN,MO,PRIMARY,1195,2132,26119506,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74126,TULSA,OK,PRIMARY,3985,6893,81627990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+91909,CHULA VISTA,CA,PRIMARY,474,841,11782141,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59829,DARBY,MT,PRIMARY,1130,1938,24292384,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60103,BARTLETT,IL,PRIMARY,20134,37918,1252379138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48894,WESTPHALIA,MI,PRIMARY,1053,2033,39758498,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75486,SUMNER,TX,PRIMARY,1017,2011,33670660,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20773,UPPER MARLBORO,MD,PRIMARY,344,513,17558570,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20195,RESTON,VA,PRIMARY,287,438,20790433,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77517,SANTA FE,TX,PRIMARY,2612,4944,131423741,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56014,BRICELYN,MN,PRIMARY,395,697,8518763,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15690,VANDERGRIFT,PA,PRIMARY,4358,7442,116409170,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3282,WENTWORTH,NH,PRIMARY,462,806,13482596,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73165,OKLAHOMA CITY,OK,PRIMARY,2598,5059,120394853,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33777,SEMINOLE,FL,PRIMARY,8280,14037,335188768,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79096,WHEELER,TX,PRIMARY,906,1776,33320288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54738,ELEVA,WI,PRIMARY,1598,3015,71309354,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2673,WEST YARMOUTH,MA,PRIMARY,4460,6986,125350712,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58104,FARGO,ND,PRIMARY,12906,22549,708573115,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49441,MUSKEGON,MI,PRIMARY,17321,29547,565270175,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47527,DUBOIS,IN,PRIMARY,975,1745,32164989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71921,AMITY,AR,PRIMARY,1162,2241,27039702,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80218,DENVER,CO,PRIMARY,10809,13313,525937276,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5254,MANCHESTER,VT,PRIMARY,499,845,19826468,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32183,OCKLAWAHA,FL,PRIMARY,560,951,12547019,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28480,WRIGHTSVILLE BEACH,NC,PRIMARY,1502,2209,67485629,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30188,WOODSTOCK,GA,PRIMARY,24012,43717,1173642244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18237,MCADOO,PA,PRIMARY,1634,2684,42689012,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90221,COMPTON,CA,PRIMARY,18585,36005,462648300,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13655,HOGANSBURG,NY,PRIMARY,1086,2150,32842598,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39057,CONEHATTA,MS,PRIMARY,888,1630,21110771,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33076,POMPANO BEACH,FL,PRIMARY,12888,25322,947666207,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72959,WINSLOW,AR,PRIMARY,1050,1990,27749155,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73079,POCASSET,OK,PRIMARY,264,531,9620964,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7750,MONMOUTH BEACH,NJ,PRIMARY,1827,3013,156890532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33027,HOLLYWOOD,FL,PRIMARY,25047,45429,1196502485,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54409,ANTIGO,WI,PRIMARY,6388,11005,182534975,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56331,FREEPORT,MN,PRIMARY,1002,1816,30541680,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +64110,KANSAS CITY,MO,PRIMARY,6528,9801,205182278,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45362,ROSSBURG,OH,PRIMARY,538,941,16658552,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92276,THOUSAND PALMS,CA,PRIMARY,3064,5380,70593564,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62010,BETHALTO,IL,PRIMARY,5429,9675,207613949,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62611,ARENZVILLE,IL,PRIMARY,504,927,18239642,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95903,BEALE AFB,CA,PRIMARY,846,1623,25497108,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33762,CLEARWATER,FL,PRIMARY,3351,5211,178625482,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55753,KEEWATIN,MN,PRIMARY,497,830,13125293,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72128,POYEN,AR,PRIMARY,343,687,10260487,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48413,BAD AXE,MI,PRIMARY,3736,6417,100458067,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42204,ALLENSVILLE,KY,PRIMARY,299,548,7749892,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35802,HUNTSVILLE,AL,PRIMARY,10852,18463,521820459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27619,RALEIGH,NC,PRIMARY,414,630,16204724,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72207,LITTLE ROCK,AR,PRIMARY,5627,9222,436797031,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48075,SOUTHFIELD,MI,PRIMARY,10626,16960,375465460,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62351,MENDON,IL,PRIMARY,879,1613,28403364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70582,SAINT MARTINVILLE,LA,PRIMARY,8569,15665,285085506,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80205,DENVER,CO,PRIMARY,13419,20189,456593085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60450,MORRIS,IL,PRIMARY,10340,18207,444554365,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85119,APACHE JUNCTION,AZ,PRIMARY,689,1207,20656687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87514,ARROYO SECO,NM,PRIMARY,613,970,12634735,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75125,FERRIS,TX,PRIMARY,2732,5213,85432345,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22728,MIDLAND,VA,PRIMARY,1389,2546,68995450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45828,COLDWATER,OH,PRIMARY,3084,5327,105167744,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89002,HENDERSON,NV,PRIMARY,13583,24976,723953476,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85547,PAYSON,AZ,PRIMARY,1006,1729,24055404,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75030,ROWLETT,TX,PRIMARY,476,849,23056349,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4493,WEST ENFIELD,ME,PRIMARY,775,1455,23720499,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84341,LOGAN,UT,PRIMARY,7668,14269,264368076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77803,BRYAN,TX,PRIMARY,11115,20368,263041853,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97396,WILLAMINA,OR,PRIMARY,1404,2516,40441721,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52733,CLINTON,IA,PRIMARY,361,548,6668997,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53214,MILWAUKEE,WI,PRIMARY,17812,27867,566023689,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10524,GARRISON,NY,PRIMARY,2076,3723,159699797,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94039,MOUNTAIN VIEW,CA,PRIMARY,471,688,32743775,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21639,GREENSBORO,MD,PRIMARY,2035,3629,69195085,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +53716,MADISON,WI,PRIMARY,10288,16904,400293102,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92254,MECCA,CA,PRIMARY,5189,11107,96913687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68319,BRADSHAW,NE,PRIMARY,262,488,7042064,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65653,FORSYTH,MO,PRIMARY,2529,4465,54047876,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36456,MC KENZIE,AL,PRIMARY,794,1465,21806318,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30107,BALL GROUND,GA,PRIMARY,5458,10682,240554192,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1810,ANDOVER,MA,PRIMARY,16462,30346,1580077483,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28376,RAEFORD,NC,PRIMARY,14162,27467,456241205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30566,OAKWOOD,GA,PRIMARY,3720,6624,119917038,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50864,VILLISCA,IA,PRIMARY,933,1681,24850601,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27288,EDEN,NC,PRIMARY,9808,17582,269043217,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51461,SCHLESWIG,IA,PRIMARY,580,1032,16056173,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72023,CABOT,AR,PRIMARY,14585,28306,594957681,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74455,PORUM,OK,PRIMARY,1106,2075,29370040,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19320,COATESVILLE,PA,PRIMARY,23309,41003,1070387587,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50056,COLO,IA,PRIMARY,659,1191,22964118,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74852,MACOMB,OK,PRIMARY,595,1127,16583731,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61844,FITHIAN,IL,PRIMARY,516,930,17564785,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20187,WARRENTON,VA,PRIMARY,6817,13112,459401358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50441,HAMPTON,IA,PRIMARY,2591,4742,75598555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49887,STEPHENSON,MI,PRIMARY,1065,1891,26108281,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87045,PREWITT,NM,PRIMARY,553,1003,10997054,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28521,CHINQUAPIN,NC,PRIMARY,845,1578,23750994,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43209,COLUMBUS,OH,PRIMARY,13327,21668,682621181,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62425,DUNDAS,IL,PRIMARY,307,550,7950991,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32164,PALM COAST,FL,PRIMARY,17242,31048,451633982,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2370,ROCKLAND,MA,PRIMARY,8794,14690,372071550,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30076,ROSWELL,GA,PRIMARY,19161,34455,1087537165,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49779,ROGERS CITY,MI,PRIMARY,2261,3945,52424806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64725,ARCHIE,MO,PRIMARY,987,1894,36226313,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38139,GERMANTOWN,TN,PRIMARY,7629,15172,864017202,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98009,BELLEVUE,WA,PRIMARY,926,1433,73539686,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49779,ROGERS CITY,MI,PRIMARY,2261,3945,52424806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28644,LAUREL SPRINGS,NC,PRIMARY,605,1115,13849064,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90808,LONG BEACH,CA,PRIMARY,18909,33131,1062835453,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16374,KENNERDELL,PA,PRIMARY,853,1523,23284895,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +95759,ELK GROVE,CA,PRIMARY,768,1325,28631213,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21733,FAIRPLAY,MD,PRIMARY,573,1070,26855409,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80741,MERINO,CO,PRIMARY,396,759,11923247,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31798,WRAY,GA,PRIMARY,432,827,11634929,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91906,CAMPO,CA,PRIMARY,1282,2441,49118662,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76126,FORT WORTH,TX,PRIMARY,9571,17198,506360212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56181,WELCOME,MN,PRIMARY,566,1015,15963154,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72466,SMITHVILLE,AR,PRIMARY,459,915,11592459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97913,NYSSA,OR,PRIMARY,2007,3893,47994364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11209,BROOKLYN,NY,PRIMARY,32762,51567,1567801401,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29301,SPARTANBURG,SC,PRIMARY,13226,23385,451629488,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37088,LEBANON,TN,PRIMARY,725,1157,20940138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1903,LYNN,MA,PRIMARY,531,851,13606118,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92248,LA QUINTA,CA,PRIMARY,548,966,20372887,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22193,WOODBRIDGE,VA,PRIMARY,32955,60216,1665525403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20177,LEESBURG,VA,PRIMARY,515,820,26736081,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74565,SAVANNA,OK,PRIMARY,392,701,13384620,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57022,DELL RAPIDS,SD,PRIMARY,2427,4446,97342309,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76932,BIG LAKE,TX,PRIMARY,1383,2780,60231796,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21158,WESTMINSTER,MD,PRIMARY,9983,18082,485160426,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23833,CHURCH ROAD,VA,PRIMARY,991,1846,39171958,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44053,LORAIN,OH,PRIMARY,8702,14680,285228163,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78580,RAYMONDVILLE,TX,PRIMARY,4414,8338,100258071,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48822,EAGLE,MI,PRIMARY,1409,2555,57687648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81067,ROCKY FORD,CO,PRIMARY,2201,4033,52837830,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33549,LUTZ,FL,PRIMARY,7730,13920,369893330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49262,NORTH ADAMS,MI,PRIMARY,564,1018,14010802,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74152,TULSA,OK,PRIMARY,282,434,13351387,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23665,HAMPTON,VA,PRIMARY,1935,3830,66821348,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70420,ABITA SPRINGS,LA,PRIMARY,3511,6539,136634569,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30008,MARIETTA,GA,PRIMARY,11686,20427,367217309,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59016,BUSBY,MT,PRIMARY,271,519,6754987,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39657,OSYKA,MS,PRIMARY,971,1819,27772258,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15376,WEST ALEXANDER,PA,PRIMARY,796,1474,26936690,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98585,SILVER CREEK,WA,PRIMARY,311,561,8499284,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18436,LAKE ARIEL,PA,PRIMARY,6183,10997,195734197,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +94920,BELVEDERE TIBURON,CA,PRIMARY,6348,10900,869131648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33548,LUTZ,FL,PRIMARY,3078,5601,172146924,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46126,FAIRLAND,IN,PRIMARY,2306,4262,86190260,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97006,BEAVERTON,OR,PRIMARY,28732,50291,1294732226,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83444,ROBERTS,ID,PRIMARY,500,992,13955008,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48081,SAINT CLAIR SHORES,MI,PRIMARY,11042,18281,405089294,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77026,HOUSTON,TX,PRIMARY,9436,16377,191123201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35771,SECTION,AL,PRIMARY,1465,2819,42174753,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32837,ORLANDO,FL,PRIMARY,23738,41755,859684130,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51537,HARLAN,IA,PRIMARY,3282,5729,98638674,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77365,PORTER,TX,PRIMARY,10291,19604,460822437,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33706,SAINT PETERSBURG,FL,PRIMARY,8231,12575,332946452,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10034,NEW YORK,NY,PRIMARY,20855,32614,604187876,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72365,MARKED TREE,AR,PRIMARY,1217,2179,27825131,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43402,BOWLING GREEN,OH,PRIMARY,13379,21455,484123577,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56524,CLITHERALL,MN,PRIMARY,356,640,8219349,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17366,WINDSOR,PA,PRIMARY,2675,4740,97297953,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17851,MOUNT CARMEL,PA,PRIMARY,3593,5842,89656801,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31642,PEARSON,GA,PRIMARY,1802,3525,40432353,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78070,SPRING BRANCH,TX,PRIMARY,6323,12245,336004817,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57471,ROSCOE,SD,PRIMARY,320,573,6051349,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40160,RADCLIFF,KY,PRIMARY,9749,17547,265254732,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14715,BOLIVAR,NY,PRIMARY,1206,2139,34342047,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52747,DURANT,IA,PRIMARY,1094,1983,41187754,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41230,LOUISA,KY,PRIMARY,4374,8613,140741418,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90230,CULVER CITY,CA,PRIMARY,15572,25528,765127473,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4419,CARMEL,ME,PRIMARY,1269,2343,43979594,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44305,AKRON,OH,PRIMARY,10399,16710,271067506,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58402,JAMESTOWN,ND,PRIMARY,463,706,14607507,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49757,MACKINAC ISLAND,MI,PRIMARY,502,709,15195247,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67202,WICHITA,KS,PRIMARY,820,1056,25391594,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92177,SAN DIEGO,CA,PRIMARY,526,766,17444998,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68465,WILBER,NE,PRIMARY,1075,1951,35088609,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84053,NEOLA,UT,PRIMARY,310,638,14758583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65452,CROCKER,MO,PRIMARY,1352,2524,36433514,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+74084,WYNONA,OK,PRIMARY,252,457,7964992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1222,ASHLEY FALLS,MA,PRIMARY,392,681,11545093,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66605,TOPEKA,KS,PRIMARY,8976,15815,267162620,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70065,KENNER,LA,PRIMARY,22698,39430,898759433,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92703,SANTA ANA,CA,PRIMARY,24634,48715,640236193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23407,MAPPSVILLE,VA,PRIMARY,310,536,7083474,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72774,WEST FORK,AR,PRIMARY,2710,5051,89189078,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89021,LOGANDALE,NV,PRIMARY,1303,2624,67641125,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71857,PRESCOTT,AR,PRIMARY,2449,4526,63666889,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81025,BOONE,CO,PRIMARY,404,708,9587699,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13159,TULLY,NY,PRIMARY,2516,4590,110806140,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6483,SEYMOUR,CT,PRIMARY,8152,14118,397890142,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99710,FAIRBANKS,AK,PRIMARY,875,1451,40361564,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95210,STOCKTON,CA,PRIMARY,13868,25741,378633657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27521,COATS,NC,PRIMARY,2478,4701,77262958,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98177,SEATTLE,WA,PRIMARY,10329,17778,621883188,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98231,BLAINE,WA,PRIMARY,776,1290,29343828,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12417,CONNELLY,NY,PRIMARY,260,432,7600316,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48051,NEW BALTIMORE,MI,PRIMARY,7586,13716,319553500,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45365,SIDNEY,OH,PRIMARY,14848,25141,478046749,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71264,OAK RIDGE,LA,PRIMARY,400,743,12612818,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35961,COLLINSVILLE,AL,PRIMARY,2218,4299,55782841,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78669,SPICEWOOD,TX,PRIMARY,3910,7063,228739962,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92530,LAKE ELSINORE,CA,PRIMARY,17339,33145,621516645,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21041,ELLICOTT CITY,MD,PRIMARY,266,423,12028370,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93591,PALMDALE,CA,PRIMARY,2227,4371,65159544,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29138,SALUDA,SC,PRIMARY,4300,8004,112078768,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6793,WASHINGTON,CT,PRIMARY,604,998,37584199,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92056,OCEANSIDE,CA,PRIMARY,24430,42843,944700547,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48465,PALMS,MI,PRIMARY,286,525,6057365,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40588,LEXINGTON,KY,PRIMARY,335,476,10120217,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72623,CLARKRIDGE,AR,PRIMARY,275,522,7514330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45344,NEW CARLISLE,OH,PRIMARY,8206,14497,272182896,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27298,LIBERTY,NC,PRIMARY,4501,8345,147055675,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31811,HAMILTON,GA,PRIMARY,1891,3585,84378596,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72702,FAYETTEVILLE,AR,PRIMARY,989,1544,33042042,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+61242,CORDOVA,IL,PRIMARY,608,1057,24052592,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31211,MACON,GA,PRIMARY,7113,12269,205730150,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38316,BRADFORD,TN,PRIMARY,1250,2322,33223350,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78040,LAREDO,TX,PRIMARY,15169,28418,290900892,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51653,TABOR,IA,PRIMARY,586,1082,20565304,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50448,KENSETT,IA,PRIMARY,333,581,8527006,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +88029,COLUMBUS,NM,PRIMARY,1014,1991,13218650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85902,SHOW LOW,AZ,PRIMARY,1964,3587,57252215,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36035,GOSHEN,AL,PRIMARY,811,1462,22676119,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52039,DURANGO,IA,PRIMARY,568,1079,20100697,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47030,METAMORA,IN,PRIMARY,720,1325,18410058,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11746,HUNTINGTON STATION,NY,PRIMARY,32197,56971,1986757091,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87023,JARALES,NM,PRIMARY,423,791,12183131,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58631,GLEN ULLIN,ND,PRIMARY,553,956,14346824,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42303,OWENSBORO,KY,PRIMARY,16887,30036,591296979,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47952,KINGMAN,IN,PRIMARY,1251,2261,32821481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92423,SAN BERNARDINO,CA,PRIMARY,615,1021,25765924,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95982,SUTTER,CA,PRIMARY,1447,2736,58659895,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79745,KERMIT,TX,PRIMARY,2442,4819,95996386,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45813,ANTWERP,OH,PRIMARY,1747,3085,53919033,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48105,ANN ARBOR,MI,PRIMARY,15149,25072,901733416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18419,FACTORYVILLE,PA,PRIMARY,2179,3759,69764541,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30445,MOUNT VERNON,GA,PRIMARY,1037,1883,26299516,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11412,SAINT ALBANS,NY,PRIMARY,16657,26508,564187781,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21617,CENTREVILLE,MD,PRIMARY,4412,8123,220131860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33826,AVON PARK,FL,PRIMARY,809,1397,19035497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41647,MC DOWELL,KY,PRIMARY,639,1224,16876193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21773,MYERSVILLE,MD,PRIMARY,2544,4697,137859875,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27824,ENGELHARD,NC,PRIMARY,566,1011,12694135,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72395,WILSON,AR,PRIMARY,458,818,14062448,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32526,PENSACOLA,FL,PRIMARY,17116,30239,526613084,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14210,BUFFALO,NY,PRIMARY,6453,10360,160177015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7920,BASKING RIDGE,NJ,PRIMARY,13210,24379,1761489724,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60172,ROSELLE,IL,PRIMARY,12831,22378,643440914,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49507,GRAND RAPIDS,MI,PRIMARY,13511,23888,343792530,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54856,MASON,WI,PRIMARY,734,1336,19961550,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +40601,FRANKFORT,KY,PRIMARY,22938,39539,721803780,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1237,LANESBORO,MA,PRIMARY,1513,2606,57730107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93032,OXNARD,CA,PRIMARY,616,1199,14946777,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28787,WEAVERVILLE,NC,PRIMARY,9202,16686,300749647,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83541,LENORE,ID,PRIMARY,414,752,9734937,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14001,AKRON,NY,PRIMARY,4753,8276,167959982,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68847,KEARNEY,NE,PRIMARY,7703,13199,267124051,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34604,BROOKSVILLE,FL,PRIMARY,3918,7120,122276944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29039,CORDOVA,SC,PRIMARY,1774,3167,42818757,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18214,BARNESVILLE,PA,PRIMARY,1105,1939,37504004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96741,KALAHEO,HI,PRIMARY,2229,3885,88740520,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14094,LOCKPORT,NY,PRIMARY,24378,41705,834465018,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38175,MEMPHIS,TN,PRIMARY,775,1274,22673894,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15683,SCOTTDALE,PA,PRIMARY,4072,7090,128711405,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24569,LONG ISLAND,VA,PRIMARY,420,782,12002292,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60941,HERSCHER,IL,PRIMARY,998,1810,37625072,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32681,ORANGE LAKE,FL,PRIMARY,412,699,9598261,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49689,WELLSTON,MI,PRIMARY,674,1176,13161694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53012,CEDARBURG,WI,PRIMARY,9303,16791,510434872,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30722,DALTON,GA,PRIMARY,1045,1873,33156912,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71030,GLOSTER,LA,PRIMARY,642,1204,19792512,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51529,DUNLAP,IA,PRIMARY,793,1409,24448432,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38452,CYPRESS INN,TN,PRIMARY,397,763,10933408,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48749,OMER,MI,PRIMARY,549,952,13287085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12985,SCHUYLER FALLS,NY,PRIMARY,529,931,16204574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30710,COHUTTA,GA,PRIMARY,2575,5041,99678926,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80547,TIMNATH,CO,PRIMARY,314,601,20268910,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45695,WILKESVILLE,OH,PRIMARY,304,525,7896798,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33483,DELRAY BEACH,FL,PRIMARY,7376,10800,442724945,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36530,ELBERTA,AL,PRIMARY,3014,5453,82269151,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19043,HOLMES,PA,PRIMARY,1428,2430,56451132,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21851,POCOMOKE CITY,MD,PRIMARY,3441,5977,105001707,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27958,MOYOCK,NC,PRIMARY,4133,8123,177994696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30327,ATLANTA,GA,PRIMARY,16627,24665,1565488324,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10462,BRONX,NY,PRIMARY,35620,57854,1197519842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8861,PERTH AMBOY,NJ,PRIMARY,24814,41825,689264920,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +4217,BETHEL,ME,PRIMARY,1595,2781,45001237,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64801,JOPLIN,MO,PRIMARY,13998,23939,396766346,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43952,STEUBENVILLE,OH,PRIMARY,7755,12725,202735860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77223,HOUSTON,TX,PRIMARY,267,466,7000057,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6825,FAIRFIELD,CT,PRIMARY,9355,16768,638307823,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37708,BEAN STATION,TN,PRIMARY,2661,4967,67890037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13165,WATERLOO,NY,PRIMARY,5168,8844,147657152,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46710,AVILLA,IN,PRIMARY,2173,4010,77137244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49404,COOPERSVILLE,MI,PRIMARY,3812,7010,131629549,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65337,LA MONTE,MO,PRIMARY,847,1642,22626986,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15436,FAIRCHANCE,PA,PRIMARY,1267,2180,33272489,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2116,BOSTON,MA,PRIMARY,10214,13687,1351420353,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16877,WARRIORS MARK,PA,PRIMARY,802,1474,33638459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96701,AIEA,HI,PRIMARY,21530,35697,861344249,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +82223,LINGLE,WY,PRIMARY,462,857,13324885,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16671,RAMEY,PA,PRIMARY,266,466,7259127,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42040,FARMINGTON,KY,PRIMARY,465,896,11562973,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36010,BRUNDIDGE,AL,PRIMARY,2159,3931,55475515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12185,VALLEY FALLS,NY,PRIMARY,964,1709,38659386,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12873,SHUSHAN,NY,PRIMARY,376,639,10792237,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98520,ABERDEEN,WA,PRIMARY,9023,15882,281981527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53537,FOOTVILLE,WI,PRIMARY,332,582,10085158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62548,MOUNT PULASKI,IL,PRIMARY,1108,1967,37112435,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12066,ESPERANCE,NY,PRIMARY,1010,1819,40371993,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39288,PEARL,MS,PRIMARY,412,688,11426134,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76127,NAVAL AIR STATION/ JRB,TX,PRIMARY,262,405,9208585,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63469,SHELBYVILLE,MO,PRIMARY,576,1032,12751737,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15870,WILCOX,PA,PRIMARY,685,1150,22281572,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80904,COLORADO SPRINGS,CO,PRIMARY,9987,15460,300137463,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27510,CARRBORO,NC,PRIMARY,7200,10912,250212722,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21090,LINTHICUM HEIGHTS,MD,PRIMARY,5033,8675,243858620,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65759,TANEYVILLE,MO,PRIMARY,500,918,11018545,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97216,PORTLAND,OR,PRIMARY,7125,11628,197896621,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19545,NEW BERLINVILLE,PA,PRIMARY,305,497,9384985,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85329,CASHION,AZ,PRIMARY,1256,2530,31843702,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75902,LUFKIN,TX,PRIMARY,587,988,17370778,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +22645,MIDDLETOWN,VA,PRIMARY,1744,3154,75454601,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31636,LAKE PARK,GA,PRIMARY,3811,7161,128630584,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67156,WINFIELD,KS,PRIMARY,6794,11851,210044044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37337,GRANDVIEW,TN,PRIMARY,591,1097,14924501,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38345,HURON,TN,PRIMARY,771,1489,25579967,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34489,SILVER SPRINGS,FL,PRIMARY,649,1089,15507565,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57078,YANKTON,SD,PRIMARY,8799,15005,286326125,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57259,REVILLO,SD,PRIMARY,250,447,5406567,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49010,ALLEGAN,MI,PRIMARY,7887,14231,255543875,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39647,MC CALL CREEK,MS,PRIMARY,437,871,14290330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93121,SANTA BARBARA,CA,PRIMARY,631,901,21837923,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8750,SEA GIRT,NJ,PRIMARY,2054,3480,175134188,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59911,BIGFORK,MT,PRIMARY,3770,6546,93872021,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95378,TRACY,CA,PRIMARY,576,1002,18653694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38326,COUNCE,TN,PRIMARY,1036,1859,33972097,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24068,CHRISTIANSBURG,VA,PRIMARY,538,875,15781170,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76060,KENNEDALE,TX,PRIMARY,3234,5925,152428234,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83217,BANCROFT,ID,PRIMARY,327,641,9211460,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25309,CHARLESTON,WV,PRIMARY,5798,9969,208657138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50441,HAMPTON,IA,PRIMARY,2591,4742,75598555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33731,SAINT PETERSBURG,FL,PRIMARY,503,684,10738465,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75439,ECTOR,TX,PRIMARY,387,712,11703480,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93555,RIDGECREST,CA,PRIMARY,13159,24004,537637961,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61368,TISKILWA,IL,PRIMARY,706,1283,19637024,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58573,STRASBURG,ND,PRIMARY,277,486,5403975,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34289,NORTH PORT,FL,PRIMARY,784,1436,21534354,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24128,NEWPORT,VA,PRIMARY,892,1597,31754246,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89030,NORTH LAS VEGAS,NV,PRIMARY,19128,37689,483264421,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76543,KILLEEN,TX,PRIMARY,12391,22469,329515990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13442,ROME,NY,PRIMARY,627,936,18582419,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46933,GAS CITY,IN,PRIMARY,3142,5559,88969193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61114,ROCKFORD,IL,PRIMARY,8040,14122,393482620,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10930,HIGHLAND MILLS,NY,PRIMARY,4153,7893,283230231,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97528,GRANTS PASS,OR,PRIMARY,1105,1806,25912381,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38174,MEMPHIS,TN,PRIMARY,342,486,10914282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60416,COAL 
CITY,IL,PRIMARY,4486,8082,194706304,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92867,ORANGE,CA,PRIMARY,18747,34563,998345482,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62202,EAST SAINT LOUIS,IL,PRIMARY,317,497,6182150,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60712,LINCOLNWOOD,IL,PRIMARY,6255,10969,286950954,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51652,SIDNEY,IA,PRIMARY,827,1496,24856756,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97477,SPRINGFIELD,OR,PRIMARY,15926,26525,431535808,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37320,CLEVELAND,TN,PRIMARY,1203,1970,38764839,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61238,CAMBRIDGE,IL,PRIMARY,1480,2714,45977626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48754,OWENDALE,MI,PRIMARY,456,820,11784555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74930,BOKOSHE,OK,PRIMARY,673,1291,18690536,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67441,ENTERPRISE,KS,PRIMARY,497,912,14533810,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31629,DIXIE,GA,PRIMARY,370,683,8799045,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61856,MONTICELLO,IL,PRIMARY,3755,6850,171499127,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72047,ENOLA,AR,PRIMARY,396,787,14174112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60930,DANFORTH,IL,PRIMARY,396,708,13032029,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48220,FERNDALE,MI,PRIMARY,11871,17626,420915748,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99102,ALBION,WA,PRIMARY,290,504,9291544,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92061,PAUMA VALLEY,CA,PRIMARY,1337,2444,40982223,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83128,ALPINE,WY,PRIMARY,764,1355,29296840,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28763,OTTO,NC,PRIMARY,1136,2070,25903242,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11420,SOUTH OZONE PARK,NY,PRIMARY,19959,34717,644033694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32766,OVIEDO,FL,PRIMARY,6170,12531,392771982,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2038,FRANKLIN,MA,PRIMARY,14611,27220,1034537207,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55117,SAINT PAUL,MN,PRIMARY,18640,30600,646795138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57334,ETHAN,SD,PRIMARY,396,734,10825971,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38141,MEMPHIS,TN,PRIMARY,10815,18497,327860921,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37130,MURFREESBORO,TN,PRIMARY,20872,34702,680894938,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78582,RIO GRANDE CITY,TX,PRIMARY,11976,24672,299540066,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86505,GANADO,AZ,PRIMARY,1444,2610,40221537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58545,HAZEN,ND,PRIMARY,1750,3178,72840362,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14872,PINE VALLEY,NY,PRIMARY,286,506,7136468,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70538,FRANKLIN,LA,PRIMARY,5787,10497,166874065,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55709,BOVEY,MN,PRIMARY,1764,3212,56893247,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49445,MUSKEGON,MI,PRIMARY,10135,18427,372164205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33414,WELLINGTON,FL,PRIMARY,23608,43745,1290177589,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71061,OIL CITY,LA,PRIMARY,707,1302,22014531,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+47665,OWENSVILLE,IN,PRIMARY,1639,3093,61388965,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77080,HOUSTON,TX,PRIMARY,16177,29608,482815586,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20773,UPPER MARLBORO,MD,PRIMARY,344,513,17558570,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93030,OXNARD,CA,PRIMARY,23624,45834,782668499,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61816,BROADLANDS,IL,PRIMARY,254,456,8235627,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16117,ELLWOOD CITY,PA,PRIMARY,8495,14648,268573628,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58237,GRAFTON,ND,PRIMARY,2657,4532,74575194,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92174,SAN DIEGO,CA,PRIMARY,518,868,14406180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60651,CHICAGO,IL,PRIMARY,27734,47832,634299811,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38310,ADAMSVILLE,TN,PRIMARY,2385,4536,64717478,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93271,THREE RIVERS,CA,PRIMARY,1086,1879,37368070,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15958,SUMMERHILL,PA,PRIMARY,1077,1906,33870823,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87930,ARREY,NM,PRIMARY,289,551,5016012,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39826,BRONWOOD,GA,PRIMARY,313,583,7549480,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68801,GRAND ISLAND,NE,PRIMARY,12518,22157,382091286,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49256,MORENCI,MI,PRIMARY,1737,3219,50500729,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70345,CUT OFF,LA,PRIMARY,4528,8375,196394021,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64138,KANSAS CITY,MO,PRIMARY,12147,20286,384747364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26030,BEECH BOTTOM,WV,PRIMARY,275,471,6643775,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75148,MALAKOFF,TX,PRIMARY,2343,4306,64709148,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44721,CANTON,OH,PRIMARY,6709,11678,271063176,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21662,ROYAL OAK,MD,PRIMARY,415,722,23020481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67480,SOLOMON,KS,PRIMARY,854,1560,29373470,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79313,ANTON,TX,PRIMARY,719,1332,19098172,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63039,GRAY SUMMIT,MO,PRIMARY,454,790,15359648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4976,SKOWHEGAN,ME,PRIMARY,4356,7550,135684772,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28209,CHARLOTTE,NC,PRIMARY,10330,15675,592214123,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19043,HOLMES,PA,PRIMARY,1428,2430,56451132,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36474,RED LEVEL,AL,PRIMARY,1169,2154,32034399,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52746,DONAHUE,IA,PRIMARY,449,843,18427282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21209,BALTIMORE,MD,PRIMARY,12434,20922,678390490,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56438,BROWERVILLE,MN,PRIMARY,1361,2416,40252970,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73012,EDMOND,OK,PRIMARY,9146,18114,608520099,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75758,CHANDLER,TX,PRIMARY,4113,7626,136403326,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66616,TOPEKA,KS,PRIMARY,2959,4783,72406615,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49010,ALLEGAN,MI,PRIMARY,7887,14231,255543875,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +41183,WORTHINGTON,KY,PRIMARY,724,1368,23088847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10305,STATEN ISLAND,NY,PRIMARY,18275,31760,828916882,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57235,FLORENCE,SD,PRIMARY,396,726,10740439,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33082,PEMBROKE PINES,FL,PRIMARY,560,981,27177584,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23050,DUTTON,VA,PRIMARY,346,640,13112053,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33169,MIAMI,FL,PRIMARY,19270,32158,552245685,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62817,BROUGHTON,IL,PRIMARY,253,472,5538551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36081,TROY,AL,PRIMARY,5891,10180,166027036,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67017,BENTON,KS,PRIMARY,1026,1909,51249220,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54977,SCANDINAVIA,WI,PRIMARY,658,1175,20343179,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19382,WEST CHESTER,PA,PRIMARY,24193,41450,1687695566,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46135,GREENCASTLE,IN,PRIMARY,7528,13457,238968366,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22959,NORTH GARDEN,VA,PRIMARY,828,1457,31845926,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28757,MONTREAT,NC,PRIMARY,290,481,7421358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29672,SENECA,SC,PRIMARY,5106,9293,224341019,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4001,ACTON,ME,PRIMARY,1090,1934,35436412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55355,LITCHFIELD,MN,PRIMARY,4532,8039,150533896,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41144,GREENUP,KY,PRIMARY,4023,7836,129737364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63626,BLACKWELL,MO,PRIMARY,375,729,11866843,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29680,SIMPSONVILLE,SC,PRIMARY,11384,21575,517070798,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3885,STRATHAM,NH,PRIMARY,3831,6872,251004191,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59868,SEELEY LAKE,MT,PRIMARY,1000,1732,21663660,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98923,COWICHE,WA,PRIMARY,711,1380,20766657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15767,PUNXSUTAWNEY,PA,PRIMARY,6626,11583,181481699,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99587,GIRDWOOD,AK,PRIMARY,1306,1911,51471481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47850,FARMERSBURG,IN,PRIMARY,1173,2120,37762044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23921,BUCKINGHAM,VA,PRIMARY,840,1472,23037440,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13491,WEST WINFIELD,NY,PRIMARY,1664,2989,52730992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76579,TROY,TX,PRIMARY,1819,3427,64436696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93513,BIG PINE,CA,PRIMARY,817,1370,26299862,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67119,OXFORD,KS,PRIMARY,679,1255,23959099,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27243,EFLAND,NC,PRIMARY,2040,3745,94542197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73568,TEMPLE,OK,PRIMARY,462,849,12171506,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97055,SANDY,OR,PRIMARY,7530,13811,296096266,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78417,CORPUS CHRISTI,TX,PRIMARY,1777,3233,45661931,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81006,PUEBLO,CO,PRIMARY,5219,9410,168254244,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +45502,SPRINGFIELD,OH,PRIMARY,8389,14878,325037911,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75760,CUSHING,TX,PRIMARY,1048,1897,29312959,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83323,DECLO,ID,PRIMARY,683,1394,20220582,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68801,GRAND ISLAND,NE,PRIMARY,12518,22157,382091286,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94002,BELMONT,CA,PRIMARY,13324,23092,1076478920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72471,SWIFTON,AR,PRIMARY,394,736,9639917,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66109,KANSAS CITY,KS,PRIMARY,9730,17446,433460543,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80814,DIVIDE,CO,PRIMARY,1832,3423,76203537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20120,CENTREVILLE,VA,PRIMARY,19282,35432,1368248118,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98840,OKANOGAN,WA,PRIMARY,1887,3339,53770984,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24224,CASTLEWOOD,VA,PRIMARY,2164,4093,64660388,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8855,PISCATAWAY,NJ,PRIMARY,286,428,12193710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45323,ENON,OH,PRIMARY,2734,4727,94452916,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48386,WHITE LAKE,MI,PRIMARY,8392,14995,390237839,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94403,SAN MATEO,CA,PRIMARY,19779,33305,1237433782,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30522,CHERRY LOG,GA,PRIMARY,500,889,12505343,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56374,SAINT JOSEPH,MN,PRIMARY,3769,6660,148662630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99011,FAIRCHILD AIR FORCE BASE,WA,PRIMARY,1127,2244,34707491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71941,DONALDSON,AR,PRIMARY,766,1479,21580809,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98027,ISSAQUAH,WA,PRIMARY,13620,24023,1002486654,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35175,UNION GROVE,AL,PRIMARY,2205,4185,81231083,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26705,AURORA,WV,PRIMARY,488,880,13031295,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35094,LEEDS,AL,PRIMARY,6371,11438,235783525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43314,CALEDONIA,OH,PRIMARY,1475,2603,47618321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48889,SUMNER,MI,PRIMARY,620,1188,17974622,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97361,MONMOUTH,OR,PRIMARY,4168,7320,133670416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49233,CEMENT CITY,MI,PRIMARY,1257,2256,42359689,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23460,VIRGINIA BEACH,VA,PRIMARY,512,563,9266008,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62638,FRANKLIN,IL,PRIMARY,699,1239,21986983,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23921,BUCKINGHAM,VA,PRIMARY,840,1472,23037440,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68045,OAKLAND,NE,PRIMARY,830,1533,25052945,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81005,PUEBLO,CO,PRIMARY,13878,24381,443382936,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65713,NIANGUA,MO,PRIMARY,972,1808,23584811,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32777,TANGERINE,FL,PRIMARY,281,509,8962046,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95688,VACAVILLE,CA,PRIMARY,15268,28085,764285035,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1474,WEST TOWNSEND,MA,PRIMARY,1035,1866,50583574,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +70729,ERWINVILLE,LA,PRIMARY,543,997,21106540,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61233,ANDOVER,IL,PRIMARY,269,501,9481373,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43515,DELTA,OH,PRIMARY,3880,6891,125377974,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14132,SANBORN,NY,PRIMARY,2861,5028,97045301,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19601,READING,PA,PRIMARY,13371,22537,277797320,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38666,SARDIS,MS,PRIMARY,2812,5090,64598376,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71913,HOT SPRINGS NATIONAL PARK,AR,PRIMARY,18793,32485,515123605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38577,PALL MALL,TN,PRIMARY,436,821,11063705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43136,LITHOPOLIS,OH,PRIMARY,570,1015,22252944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7460,STOCKHOLM,NJ,PRIMARY,1747,3178,99498648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15019,BULGER,PA,PRIMARY,830,1386,26038890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97760,TERREBONNE,OR,PRIMARY,3089,5621,84167568,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68070,WESTON,NE,PRIMARY,391,718,11761161,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57339,FORT THOMPSON,SD,PRIMARY,518,969,9643572,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19122,PHILADELPHIA,PA,PRIMARY,6297,9856,136619959,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29451,ISLE OF PALMS,SC,PRIMARY,2203,3790,133836057,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29696,WEST UNION,SC,PRIMARY,1953,3582,65851508,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49455,SHELBY,MI,PRIMARY,2227,4142,58487281,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83839,KINGSTON,ID,PRIMARY,550,1005,16892199,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22313,ALEXANDRIA,VA,PRIMARY,364,478,16842666,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56385,VILLARD,MN,PRIMARY,460,820,12949042,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58077,WALCOTT,ND,PRIMARY,362,685,14291418,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96779,PAIA,HI,PRIMARY,2087,3245,56268426,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94506,DANVILLE,CA,PRIMARY,10418,20743,1408422912,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84511,BLANDING,UT,PRIMARY,1476,2906,48325188,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14853,ITHACA,NY,PRIMARY,302,342,5064499,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66015,COLONY,KS,PRIMARY,374,670,9028170,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13676,POTSDAM,NY,PRIMARY,5068,8585,171680430,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68978,SUPERIOR,NE,PRIMARY,1155,1993,25405139,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66103,KANSAS CITY,KS,PRIMARY,5915,9428,158348574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59070,ROBERTS,MT,PRIMARY,481,863,14303335,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18405,BEACH LAKE,PA,PRIMARY,1130,1980,31230308,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98051,RAVENSDALE,WA,PRIMARY,1734,3087,97853859,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49072,MENDON,MI,PRIMARY,1467,2691,47565379,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30153,ROCKMART,GA,PRIMARY,7295,13703,235743491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21146,SEVERNA PARK,MD,PRIMARY,12624,23707,902644194,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +12168,STEPHENTOWN,NY,PRIMARY,958,1639,32261983,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28683,THURMOND,NC,PRIMARY,691,1294,17446767,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +82190,YELLOWSTONE NATIONAL PARK,WY,PRIMARY,358,515,14677047,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35130,QUINTON,AL,PRIMARY,1280,2403,41987238,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40068,SMITHFIELD,KY,PRIMARY,1050,1973,42721880,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1982,SOUTH HAMILTON,MA,PRIMARY,3513,6501,296420268,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85625,PEARCE,AZ,PRIMARY,852,1462,16435242,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55025,FOREST LAKE,MN,PRIMARY,11050,19742,513278711,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95926,CHICO,CA,PRIMARY,14130,22651,418715754,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94938,LAGUNITAS,CA,PRIMARY,302,479,8857201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87375,YATAHEY,NM,PRIMARY,635,1082,14540511,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29691,WALHALLA,SC,PRIMARY,4416,8224,120258867,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89803,ELKO,NV,PRIMARY,968,1627,47860883,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78659,PAIGE,TX,PRIMARY,1191,2142,40635886,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55130,SAINT PAUL,MN,PRIMARY,5969,10092,144491142,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33673,TAMPA,FL,PRIMARY,276,429,6481567,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76470,RANGER,TX,PRIMARY,1212,2140,27806648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4684,SURRY,ME,PRIMARY,736,1252,21803755,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54138,LAKEWOOD,WI,PRIMARY,507,866,10298781,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92395,VICTORVILLE,CA,PRIMARY,14227,26559,440358531,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67449,HERINGTON,KS,PRIMARY,1465,2585,40048600,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33493,SOUTH BAY,FL,PRIMARY,1267,2306,31364205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67466,MILTONVALE,KS,PRIMARY,333,601,6826313,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68020,DECATUR,NE,PRIMARY,363,618,9241891,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78339,BANQUETE,TX,PRIMARY,466,894,14211551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85396,BUCKEYE,AZ,PRIMARY,4286,8428,214489002,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14572,WAYLAND,NY,PRIMARY,2356,4137,72787411,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29250,COLUMBIA,SC,PRIMARY,285,418,11366477,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17059,MIFFLINTOWN,PA,PRIMARY,3379,6026,100090251,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24330,FRIES,VA,PRIMARY,1378,2514,31753875,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36542,GULF SHORES,AL,PRIMARY,4773,8103,126030692,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15357,RICES LANDING,PA,PRIMARY,765,1350,25078384,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39667,TYLERTOWN,MS,PRIMARY,4995,9415,132768382,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35953,ASHVILLE,AL,PRIMARY,3038,5784,98716002,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4780,SMYRNA MILLS,ME,PRIMARY,302,573,6894049,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60626,CHICAGO,IL,PRIMARY,21612,32581,763896673,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +42078,SALEM,KY,PRIMARY,763,1447,21640714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48457,MONTROSE,MI,PRIMARY,3791,6924,106989146,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4982,STRATTON,ME,PRIMARY,302,509,7559592,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32221,JACKSONVILLE,FL,PRIMARY,12119,22388,457633744,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14042,DELEVAN,NY,PRIMARY,1718,3060,50164400,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46759,KEYSTONE,IN,PRIMARY,269,506,7290814,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27116,WINSTON SALEM,NC,PRIMARY,275,425,6511545,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90022,LOS ANGELES,CA,PRIMARY,24562,46198,622984671,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35045,CLANTON,AL,PRIMARY,5398,10205,170447908,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6117,WEST HARTFORD,CT,PRIMARY,7295,13211,599591815,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11977,WESTHAMPTON,NY,PRIMARY,1221,2110,73879423,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96142,TAHOMA,CA,PRIMARY,359,590,10865121,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29388,WOODRUFF,SC,PRIMARY,5829,11048,196764372,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53158,PLEASANT PRAIRIE,WI,PRIMARY,7465,13763,416865360,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7057,WALLINGTON,NJ,PRIMARY,5850,9893,215940992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8873,SOMERSET,NJ,PRIMARY,24909,42921,1399665021,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90059,LOS ANGELES,CA,PRIMARY,13711,25507,310497390,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44410,CORTLAND,OH,PRIMARY,9291,15757,317805586,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62225,SCOTT AIR FORCE BASE,IL,PRIMARY,1845,4091,77592935,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80743,OTIS,CO,PRIMARY,474,874,12580454,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62920,COBDEN,IL,PRIMARY,1431,2573,40916944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90702,ARTESIA,CA,PRIMARY,341,540,13296043,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40906,BARBOURVILLE,KY,PRIMARY,3781,7188,100928544,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37366,PELHAM,TN,PRIMARY,372,687,9833180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19115,PHILADELPHIA,PA,PRIMARY,15881,26806,584521712,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52756,LONG GROVE,IA,PRIMARY,1059,2008,49143103,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12053,DELANSON,NY,PRIMARY,2093,3842,99351136,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56425,BAXTER,MN,PRIMARY,3510,6468,139021777,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77856,FRANKLIN,TX,PRIMARY,1882,3468,60191662,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3576,COLEBROOK,NH,PRIMARY,1644,2776,45479318,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29681,SIMPSONVILLE,SC,PRIMARY,20606,40234,1200442556,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11368,CORONA,NY,PRIMARY,39840,67717,892353230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49747,HUBBARD LAKE,MI,PRIMARY,872,1525,16331551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96822,HONOLULU,HI,PRIMARY,21430,32666,843183964,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25638,OMAR,WV,PRIMARY,397,777,11724824,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+5462,HUNTINGTON,VT,PRIMARY,958,1733,41923754,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25506,BRANCHLAND,WV,PRIMARY,1526,2987,42199436,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16828,CENTRE HALL,PA,PRIMARY,2204,3930,72939133,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64130,KANSAS CITY,MO,PRIMARY,8872,14296,181743514,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25557,RANGER,WV,PRIMARY,494,971,13222684,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3076,PELHAM,NH,PRIMARY,6433,11839,366493993,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10475,BRONX,NY,PRIMARY,21124,31659,740068715,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4488,STETSON,ME,PRIMARY,519,944,16712172,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71430,FOREST HILL,LA,PRIMARY,968,1848,30126708,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36580,SUMMERDALE,AL,PRIMARY,2263,4095,61493346,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44040,GATES MILLS,OH,PRIMARY,1859,3144,220009031,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20748,TEMPLE HILLS,MD,PRIMARY,19163,30423,735793244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56551,HENNING,MN,PRIMARY,957,1748,22376564,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46173,RUSHVILLE,IN,PRIMARY,5133,9157,145188539,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79110,AMARILLO,TX,PRIMARY,8483,15253,283911288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29727,MOUNT CROGHAN,SC,PRIMARY,569,1078,13728394,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83332,HAGERMAN,ID,PRIMARY,1018,1848,24093449,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14043,DEPEW,NY,PRIMARY,13172,21518,430002941,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62223,BELLEVILLE,IL,PRIMARY,9236,15546,369657037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98320,BRINNON,WA,PRIMARY,551,924,10262619,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50022,ATLANTIC,IA,PRIMARY,4087,7112,113076453,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72455,POCAHONTAS,AR,PRIMARY,5000,9253,121599670,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59875,VICTOR,MT,PRIMARY,1663,2927,38412853,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3784,WEST LEBANON,NH,PRIMARY,2089,3344,82514482,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3079,SALEM,NH,PRIMARY,15171,26049,714089826,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98586,SOUTH BEND,WA,PRIMARY,929,1730,24773724,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23707,PORTSMOUTH,VA,PRIMARY,6615,11025,202037015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79606,ABILENE,TX,PRIMARY,10401,18819,415666682,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98251,GOLD BAR,WA,PRIMARY,2081,3652,81231650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47567,PETERSBURG,IN,PRIMARY,2719,4936,90076257,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61469,OQUAWKA,IL,PRIMARY,1048,1798,27841747,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8346,NEWTONVILLE,NJ,PRIMARY,419,717,13139452,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8009,BERLIN,NJ,PRIMARY,6593,11467,308358424,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98360,ORTING,WA,PRIMARY,5584,10462,262496781,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62420,CASEY,IL,PRIMARY,2295,4031,59695180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91763,MONTCLAIR,CA,PRIMARY,14454,27063,453244725,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +48173,ROCKWOOD,MI,PRIMARY,6080,11095,268466610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36035,GOSHEN,AL,PRIMARY,811,1462,22676119,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41425,EZEL,KY,PRIMARY,390,801,10204009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16210,ADRIAN,PA,PRIMARY,455,843,13678147,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18913,CARVERSVILLE,PA,PRIMARY,253,421,32900007,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43528,HOLLAND,OH,PRIMARY,8008,13829,385634634,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31551,MERSHON,GA,PRIMARY,352,697,10201883,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30156,KENNESAW,GA,PRIMARY,503,763,15618680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71405,BALL,LA,PRIMARY,2433,4354,76939911,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64834,CARL JUNCTION,MO,PRIMARY,3951,7698,159219087,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38858,NETTLETON,MS,PRIMARY,2783,5263,72541372,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17365,WELLSVILLE,PA,PRIMARY,1311,2344,52832508,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73080,PURCELL,OK,PRIMARY,3789,7043,127744932,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18030,BOWMANSTOWN,PA,PRIMARY,315,535,9387111,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37143,PEGRAM,TN,PRIMARY,1851,3423,73157985,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41071,NEWPORT,KY,PRIMARY,9108,14293,295339416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63532,BEVIER,MO,PRIMARY,607,1082,14533303,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11763,MEDFORD,NY,PRIMARY,13590,23711,649126962,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97870,RICHLAND,OR,PRIMARY,269,457,3872036,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67467,MINNEAPOLIS,KS,PRIMARY,1410,2491,42323337,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80235,DENVER,CO,PRIMARY,3720,6000,149636214,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11954,MONTAUK,NY,PRIMARY,2507,3976,83023570,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2910,CRANSTON,RI,PRIMARY,11193,18350,395506404,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99581,EMMONAK,AK,PRIMARY,322,545,6049925,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6269,STORRS MANSFIELD,CT,PRIMARY,375,384,5069622,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17212,BIG COVE TANNERY,PA,PRIMARY,281,538,9425445,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41139,FLATWOODS,KY,PRIMARY,3692,6748,121902277,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95307,CERES,CA,PRIMARY,15983,30984,503604712,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92656,ALISO VIEJO,CA,PRIMARY,22348,39419,1542655701,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5494,WESTFORD,VT,PRIMARY,870,1555,41720931,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95549,KNEELAND,CA,PRIMARY,350,635,10673232,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26178,SMITHVILLE,WV,PRIMARY,271,487,5849046,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14621,ROCHESTER,NY,PRIMARY,12188,20021,235030215,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97457,MYRTLE CREEK,OR,PRIMARY,3951,7128,100415178,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46237,INDIANAPOLIS,IN,PRIMARY,18892,33325,807583985,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92261,PALM DESERT,CA,PRIMARY,1163,1814,30976723,2017-01-01 12:00:00+00:00,2017-01-01 
12:00:00+00:00 +7102,NEWARK,NJ,PRIMARY,3651,5503,135280553,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33015,HIALEAH,FL,PRIMARY,27904,48677,913467912,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34112,NAPLES,FL,PRIMARY,11854,19197,289148450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32040,GLEN SAINT MARY,FL,PRIMARY,3297,6294,115147099,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54937,FOND DU LAC,WI,PRIMARY,7718,13861,341453239,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72443,MARMADUKE,AR,PRIMARY,1105,2148,28994684,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11230,BROOKLYN,NY,PRIMARY,32295,56454,1182130112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13132,PENNELLVILLE,NY,PRIMARY,1968,3516,72968697,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98232,BOW,WA,PRIMARY,1955,3469,77454557,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11933,CALVERTON,NY,PRIMARY,3242,5527,138189454,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75455,MOUNT PLEASANT,TX,PRIMARY,10164,19644,316107076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28815,ASHEVILLE,NC,PRIMARY,557,836,14421870,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14167,VARYSBURG,NY,PRIMARY,795,1383,27640706,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55012,CENTER CITY,MN,PRIMARY,937,1720,40798861,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38650,MYRTLE,MS,PRIMARY,1498,2838,39922440,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74883,WETUMKA,OK,PRIMARY,987,1789,25388758,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76801,BROWNWOOD,TX,PRIMARY,9898,17792,279370637,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63437,CLARENCE,MO,PRIMARY,706,1261,15178971,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33323,FORT LAUDERDALE,FL,PRIMARY,9140,16261,456414551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47991,WEST LEBANON,IN,PRIMARY,487,901,15316403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92706,SANTA ANA,CA,PRIMARY,14635,27773,521280485,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26855,CABINS,WV,PRIMARY,319,590,8662344,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15928,DAVIDSVILLE,PA,PRIMARY,990,1745,29637638,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20876,GERMANTOWN,MD,PRIMARY,11707,20992,662408800,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57276,WHITE,SD,PRIMARY,531,955,16098630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48453,MARLETTE,MI,PRIMARY,2295,4196,57824700,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95677,ROCKLIN,CA,PRIMARY,11381,20132,536083455,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36560,MOUNT VERNON,AL,PRIMARY,1377,2564,33969230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47932,COVINGTON,IN,PRIMARY,2535,4661,84945611,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84039,LAPOINT,UT,PRIMARY,391,772,17768323,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8536,PLAINSBORO,NJ,PRIMARY,9397,17344,753685514,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33165,MIAMI,FL,PRIMARY,26643,44352,771792330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38039,GRAND JUNCTION,TN,PRIMARY,906,1629,21642549,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27874,SCOTLAND NECK,NC,PRIMARY,1980,3455,42200986,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95361,OAKDALE,CA,PRIMARY,13130,24557,550548895,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+47523,DALE,IN,PRIMARY,1542,2768,49588107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24603,BIG ROCK,VA,PRIMARY,446,851,13562279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84011,BOUNTIFUL,UT,PRIMARY,441,746,14631744,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30563,MOUNT AIRY,GA,PRIMARY,2198,4196,72676015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75652,HENDERSON,TX,PRIMARY,5285,9772,174907560,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5089,WINDSOR,VT,PRIMARY,2134,3525,71543063,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3444,DUBLIN,NH,PRIMARY,781,1417,32765403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98903,YAKIMA,WA,PRIMARY,6345,11510,198636282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90632,LA HABRA,CA,PRIMARY,404,673,15915967,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46076,WINDFALL,IN,PRIMARY,781,1447,25002028,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64056,INDEPENDENCE,MO,PRIMARY,7307,13063,252906377,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39296,JACKSON,MS,PRIMARY,251,378,9875015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36756,MARION,AL,PRIMARY,2407,4346,53117958,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75091,SHERMAN,TX,PRIMARY,992,1675,36634282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21742,HAGERSTOWN,MD,PRIMARY,14688,25720,651749820,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48374,NOVI,MI,PRIMARY,6185,12379,572559561,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20147,ASHBURN,VA,PRIMARY,23767,45259,2005708619,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16130,HADLEY,PA,PRIMARY,992,1726,25303235,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41042,FLORENCE,KY,PRIMARY,23212,40441,919294532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3234,EPSOM,NH,PRIMARY,2357,4174,96794392,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95357,MODESTO,CA,PRIMARY,5474,10140,215924416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66510,MELVERN,KS,PRIMARY,357,661,11195502,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75644,GILMER,TX,PRIMARY,4953,9164,145792063,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72419,CARAWAY,AR,PRIMARY,701,1332,17355451,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35587,TOWNLEY,AL,PRIMARY,361,693,10770532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30172,SHANNON,GA,PRIMARY,496,878,11388468,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14006,ANGOLA,NY,PRIMARY,4925,8348,161988353,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15342,HOUSTON,PA,PRIMARY,2513,4211,87932376,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37727,DEL RIO,TN,PRIMARY,824,1541,18971575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83522,COTTONWOOD,ID,PRIMARY,835,1502,22103548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87522,CHIMAYO,NM,PRIMARY,1335,2265,37296044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49643,INTERLOCHEN,MI,PRIMARY,2969,5269,90518408,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54653,ROCKLAND,WI,PRIMARY,484,883,16064109,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40175,VINE GROVE,KY,PRIMARY,5645,10700,187414483,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75790,VAN,TX,PRIMARY,1920,3609,63811063,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63033,FLORISSANT,MO,PRIMARY,21268,35384,682026273,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92277,TWENTYNINE 
PALMS,CA,PRIMARY,8483,16226,254665955,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55072,SANDSTONE,MN,PRIMARY,1500,2569,38252125,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3580,FRANCONIA,NH,PRIMARY,830,1362,28818933,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72047,ENOLA,AR,PRIMARY,396,787,14174112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18439,LAKEWOOD,PA,PRIMARY,250,433,7347310,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64790,WALKER,MO,PRIMARY,353,651,6840515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45654,NEW PLYMOUTH,OH,PRIMARY,364,679,9368509,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45505,SPRINGFIELD,OH,PRIMARY,8745,14526,208180984,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4259,MONMOUTH,ME,PRIMARY,1488,2764,54751262,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75839,ELKHART,TX,PRIMARY,2145,3985,70107398,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78130,NEW BRAUNFELS,TX,PRIMARY,26382,47380,915619654,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58482,STEELE,ND,PRIMARY,471,834,11805631,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33414,WELLINGTON,FL,PRIMARY,23608,43745,1290177589,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83352,SHOSHONE,ID,PRIMARY,1247,2411,38485858,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79916,FORT BLISS,TX,PRIMARY,1668,1803,35153258,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38138,GERMANTOWN,TN,PRIMARY,12287,22137,867998325,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27712,DURHAM,NC,PRIMARY,9696,17922,509535483,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17065,MOUNT HOLLY SPRINGS,PA,PRIMARY,2146,3792,77562605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49930,HANCOCK,MI,PRIMARY,2867,4781,84280969,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90061,LOS ANGELES,CA,PRIMARY,10076,18477,238903309,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45843,FOREST,OH,PRIMARY,1752,3140,55481075,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49841,GWINN,MI,PRIMARY,2866,5127,75735495,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78755,AUSTIN,TX,PRIMARY,475,706,20473711,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92192,SAN DIEGO,CA,PRIMARY,695,1002,40979982,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28503,KINSTON,NC,PRIMARY,381,644,11064754,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5846,ISLAND POND,VT,PRIMARY,636,1053,13151508,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10473,BRONX,NY,PRIMARY,26122,42812,817850845,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90071,LOS ANGELES,CA,PRIMARY,639,964,75762361,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84414,OGDEN,UT,PRIMARY,11433,21986,509315593,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25506,BRANCHLAND,WV,PRIMARY,1526,2987,42199436,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1969,ROWLEY,MA,PRIMARY,2926,5259,182610213,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61039,GERMAN VALLEY,IL,PRIMARY,397,756,15061557,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60480,WILLOW SPRINGS,IL,PRIMARY,2859,4851,162192650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46767,LIGONIER,IN,PRIMARY,3517,6961,100798989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +81410,AUSTIN,CO,PRIMARY,681,1247,19461749,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87317,GAMERCO,NM,PRIMARY,577,988,12731045,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61752,LE 
ROY,IL,PRIMARY,2078,3813,85876769,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72366,MARVELL,AR,PRIMARY,918,1629,18975872,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12737,GLEN SPEY,NY,PRIMARY,852,1516,32340091,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6109,WETHERSFIELD,CT,PRIMARY,14144,23881,681976153,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92593,TEMECULA,CA,PRIMARY,895,1600,32274104,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69045,WAUNETA,NE,PRIMARY,461,821,7947200,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77805,BRYAN,TX,PRIMARY,1151,1910,37628002,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96705,ELEELE,HI,PRIMARY,1205,2101,45346330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74079,STROUD,OK,PRIMARY,1814,3345,55561714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92552,MORENO VALLEY,CA,PRIMARY,1274,2209,40390418,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18701,WILKES BARRE,PA,PRIMARY,676,890,11338812,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21663,SAINT MICHAELS,MD,PRIMARY,1687,2855,76085334,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48616,CHESANING,MI,PRIMARY,3538,6441,105758829,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27045,RURAL HALL,NC,PRIMARY,3955,7167,132984201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95019,FREEDOM,CA,PRIMARY,3535,7068,101582066,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62853,KELL,IL,PRIMARY,444,816,13040519,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61242,CORDOVA,IL,PRIMARY,608,1057,24052592,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85539,MIAMI,AZ,PRIMARY,1130,1998,34779837,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98660,VANCOUVER,WA,PRIMARY,4977,7929,171919527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54896,WINTER,WI,PRIMARY,767,1313,15481838,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23523,NORFOLK,VA,PRIMARY,3188,5360,62395602,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44887,TIRO,OH,PRIMARY,501,898,15340626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70509,LAFAYETTE,LA,PRIMARY,843,1423,25328687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16915,COUDERSPORT,PA,PRIMARY,2671,4741,84422963,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60565,NAPERVILLE,IL,PRIMARY,19741,37904,1591174879,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52540,BRIGHTON,IA,PRIMARY,673,1190,17573415,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7718,BELFORD,NJ,PRIMARY,3101,5572,170026421,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4920,BINGHAM,ME,PRIMARY,705,1204,14763726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55335,GIBBON,MN,PRIMARY,789,1395,19522927,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5874,WESTFIELD,VT,PRIMARY,257,464,6554608,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24553,GLADSTONE,VA,PRIMARY,788,1456,23455656,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +88045,LORDSBURG,NM,PRIMARY,1577,2798,43110519,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33607,TAMPA,FL,PRIMARY,12780,19120,295349939,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49046,DELTON,MI,PRIMARY,3282,5973,109608648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94949,NOVATO,CA,PRIMARY,8237,14246,477070476,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44028,COLUMBIA STATION,OH,PRIMARY,4607,7817,183755201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+44011,AVON,OH,PRIMARY,9615,17564,572075157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28434,COUNCIL,NC,PRIMARY,516,927,13483314,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37690,TELFORD,TN,PRIMARY,1775,3331,53372385,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78565,LOS EBANOS,TX,PRIMARY,504,1017,9101957,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78574,MISSION,TX,PRIMARY,14582,30029,369076600,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +94564,PINOLE,CA,PRIMARY,9214,15898,410951647,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57570,ROSEBUD,SD,PRIMARY,741,1399,17240936,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20158,HAMILTON,VA,PRIMARY,1644,3203,141237456,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97030,GRESHAM,OR,PRIMARY,16314,28011,517481587,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37809,MIDWAY,TN,PRIMARY,834,1580,24600102,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92801,ANAHEIM,CA,PRIMARY,24755,45683,790993358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49230,BROOKLYN,MI,PRIMARY,5214,9108,177009246,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59855,PABLO,MT,PRIMARY,596,1075,12586416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18510,SCRANTON,PA,PRIMARY,4764,7607,129145124,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26159,PADEN CITY,WV,PRIMARY,1207,2162,31629367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34737,HOWEY IN THE HILLS,FL,PRIMARY,1271,2305,55806159,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40050,NEW CASTLE,KY,PRIMARY,710,1297,19819131,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68847,KEARNEY,NE,PRIMARY,7703,13199,267124051,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8619,TRENTON,NJ,PRIMARY,11573,19382,565510328,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62326,COLCHESTER,IL,PRIMARY,1216,2079,34352762,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27708,DURHAM,NC,PRIMARY,289,345,6548220,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45663,WEST PORTSMOUTH,OH,PRIMARY,2661,4988,75946276,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80911,COLORADO SPRINGS,CO,PRIMARY,13584,25331,458792855,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92160,SAN DIEGO,CA,PRIMARY,340,501,11148512,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36376,WEBB,AL,PRIMARY,980,1828,27107825,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60604,CHICAGO,IL,PRIMARY,922,1192,42414664,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46637,SOUTH BEND,IN,PRIMARY,6980,11644,238693177,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38460,GOODSPRING,TN,PRIMARY,596,1118,17892699,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28584,SWANSBORO,NC,PRIMARY,5375,9654,173005896,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27574,ROXBORO,NC,PRIMARY,6056,11151,204771737,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60118,DUNDEE,IL,PRIMARY,8129,14431,462291903,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61723,ATLANTA,IL,PRIMARY,1214,2146,43417857,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41216,EAST POINT,KY,PRIMARY,381,742,13145694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95001,APTOS,CA,PRIMARY,1003,1526,42277293,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71935,CADDO GAP,AR,PRIMARY,271,527,5072844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68526,LINCOLN,NE,PRIMARY,2024,4025,148597860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+85037,PHOENIX,AZ,PRIMARY,16110,30898,525204717,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37079,INDIAN MOUND,TN,PRIMARY,1078,2081,32422409,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8848,MILFORD,NJ,PRIMARY,4036,7470,261146410,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84054,NORTH SALT LAKE,UT,PRIMARY,6555,12482,336772173,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24925,CALDWELL,WV,PRIMARY,475,897,15081072,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73534,DUNCAN,OK,PRIMARY,662,1144,21117935,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32577,MOLINO,FL,PRIMARY,2177,4057,73645693,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14851,ITHACA,NY,PRIMARY,356,469,9456698,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63019,CRYSTAL CITY,MO,PRIMARY,2086,3688,68962638,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16636,DYSART,PA,PRIMARY,377,675,12187008,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56360,OSAKIS,MN,PRIMARY,1787,3196,44391725,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17777,WATSONTOWN,PA,PRIMARY,3225,5704,99411335,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99163,PULLMAN,WA,PRIMARY,9718,15193,329628503,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80117,KIOWA,CO,PRIMARY,1136,2139,54976626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71064,PLAIN DEALING,LA,PRIMARY,1557,2783,41316889,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76630,BRUCEVILLE,TX,PRIMARY,742,1397,29530450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13204,SYRACUSE,NY,PRIMARY,7788,12137,180571623,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96057,MCCLOUD,CA,PRIMARY,623,1037,12502847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27325,ROBBINS,NC,PRIMARY,2719,5174,74882494,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46368,PORTAGE,IN,PRIMARY,18487,32445,713943132,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33609,TAMPA,FL,PRIMARY,8358,12817,502464700,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49076,OLIVET,MI,PRIMARY,1766,3277,58184953,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62919,CAVE IN ROCK,IL,PRIMARY,365,697,10712302,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45224,CINCINNATI,OH,PRIMARY,10218,16213,307385578,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17061,MILLERSBURG,PA,PRIMARY,3383,5854,110365357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1119,SPRINGFIELD,MA,PRIMARY,5978,9915,191005823,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80163,LITTLETON,CO,PRIMARY,283,443,16062338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98101,SEATTLE,WA,PRIMARY,6245,7771,321726983,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13032,CANASTOTA,NY,PRIMARY,5950,10386,199644445,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74852,MACOMB,OK,PRIMARY,595,1127,16583731,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73038,FORT COBB,OK,PRIMARY,763,1410,20184763,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44634,HOMEWORTH,OH,PRIMARY,1038,1868,34751769,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87823,LEMITAR,NM,PRIMARY,300,538,6855610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11955,MORICHES,NY,PRIMARY,1698,2780,70767286,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8691,TRENTON,NJ,PRIMARY,7101,13091,573048878,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84537,ORANGEVILLE,UT,PRIMARY,537,1108,25866959,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75754,BEN 
WHEELER,TX,PRIMARY,2428,4452,76850949,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84601,PROVO,UT,PRIMARY,11603,22642,343200226,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76059,KEENE,TX,PRIMARY,1918,3540,56418169,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10603,WHITE PLAINS,NY,PRIMARY,9301,14937,530196836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50464,PLYMOUTH,IA,PRIMARY,307,560,9575651,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43116,COMMERCIAL POINT,OH,PRIMARY,622,1210,27850157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71913,HOT SPRINGS NATIONAL PARK,AR,PRIMARY,18793,32485,515123605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12192,WEST COXSACKIE,NY,PRIMARY,731,1254,26391056,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44133,NORTH ROYALTON,OH,PRIMARY,17057,27776,735697398,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99161,PALOUSE,WA,PRIMARY,685,1243,23759533,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97862,MILTON FREEWATER,OR,PRIMARY,4570,8481,122656785,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84043,LEHI,UT,PRIMARY,16368,34837,805922225,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1026,CUMMINGTON,MA,PRIMARY,520,865,15391058,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39211,JACKSON,MS,PRIMARY,11198,18806,521231524,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90255,HUNTINGTON PARK,CA,PRIMARY,28269,54716,708223920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93387,BAKERSFIELD,CA,PRIMARY,455,874,10952866,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31410,SAVANNAH,GA,PRIMARY,11344,19812,566547068,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1007,BELCHERTOWN,MA,PRIMARY,7097,12895,342529324,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69358,MORRILL,NE,PRIMARY,908,1617,27166691,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68856,MERNA,NE,PRIMARY,295,545,7090931,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57770,PINE RIDGE,SD,PRIMARY,1650,3139,41093990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41553,PHELPS,KY,PRIMARY,793,1556,22683216,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8555,ROOSEVELT,NJ,PRIMARY,444,796,22653514,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53095,WEST BEND,WI,PRIMARY,13758,24005,596800992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74728,BROKEN BOW,OK,PRIMARY,4373,8178,112950201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36010,BRUNDIDGE,AL,PRIMARY,2159,3931,55475515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36504,ATMORE,AL,PRIMARY,624,1096,16284788,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41822,HINDMAN,KY,PRIMARY,1155,2175,41339584,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15469,NORMALVILLE,PA,PRIMARY,972,1752,25874806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17086,RICHFIELD,PA,PRIMARY,1070,1974,29545687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47345,GREENS FORK,IN,PRIMARY,626,1190,19050091,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62817,BROUGHTON,IL,PRIMARY,253,472,5538551,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5774,WELLS,VT,PRIMARY,623,1047,15825719,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27943,HATTERAS,NC,PRIMARY,398,648,9853980,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78151,RUNGE,TX,PRIMARY,572,1060,13996473,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16877,WARRIORS MARK,PA,PRIMARY,802,1474,33638459,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +17845,MILLMONT,PA,PRIMARY,959,1790,21610107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20187,WARRENTON,VA,PRIMARY,6817,13112,459401358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22313,ALEXANDRIA,VA,PRIMARY,364,478,16842666,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28470,SHALLOTTE,NC,PRIMARY,3530,6333,88595330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17740,JERSEY SHORE,PA,PRIMARY,6054,10809,184817065,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33145,MIAMI,FL,PRIMARY,13944,22151,424788779,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23432,SUFFOLK,VA,PRIMARY,704,1276,34393045,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84622,CENTERFIELD,UT,PRIMARY,514,1032,15810366,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76127,NAVAL AIR STATION/ JRB,TX,PRIMARY,262,405,9208585,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98848,QUINCY,WA,PRIMARY,4255,8293,119200919,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17058,MIFFLIN,PA,PRIMARY,854,1462,20907837,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64728,BRONAUGH,MO,PRIMARY,263,497,6383599,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78599,WESLACO,TX,PRIMARY,1716,3313,44704752,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12857,OLMSTEDVILLE,NY,PRIMARY,299,512,7057434,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26505,MORGANTOWN,WV,PRIMARY,11086,16568,381211913,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48236,GROSSE POINTE,MI,PRIMARY,15705,27762,1114680733,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87197,ALBUQUERQUE,NM,PRIMARY,389,593,12565091,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28092,LINCOLNTON,NC,PRIMARY,15221,28399,495711965,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46952,MARION,IN,PRIMARY,9773,16978,294968479,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6824,FAIRFIELD,CT,PRIMARY,14839,26855,1714239972,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92654,LAGUNA HILLS,CA,PRIMARY,577,846,18974739,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14065,FREEDOM,NY,PRIMARY,791,1467,24554282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79245,MEMPHIS,TX,PRIMARY,992,1853,22094630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93036,OXNARD,CA,PRIMARY,15745,29528,584645007,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97348,HALSEY,OR,PRIMARY,653,1231,20100572,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93011,CAMARILLO,CA,PRIMARY,1178,1879,47507052,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14212,BUFFALO,NY,PRIMARY,4060,6607,89392718,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54638,KENDALL,WI,PRIMARY,689,1269,20506162,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74871,STONEWALL,OK,PRIMARY,972,1773,27850167,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79772,PECOS,TX,PRIMARY,3962,7403,123068574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32314,TALLAHASSEE,FL,PRIMARY,986,1510,24537352,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93275,TULARE,CA,PRIMARY,811,1471,22331244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65629,CHADWICK,MO,PRIMARY,256,497,6112241,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48176,SALINE,MI,PRIMARY,9991,18549,597069121,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6234,BROOKLYN,CT,PRIMARY,3671,6516,157308809,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13104,MANLIUS,NY,PRIMARY,7629,14081,532753376,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +36006,BILLINGSLEY,AL,PRIMARY,564,1117,19042671,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35218,BIRMINGHAM,AL,PRIMARY,3046,5263,63831130,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75243,DALLAS,TX,PRIMARY,24145,40571,824539368,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71234,DOWNSVILLE,LA,PRIMARY,1778,3359,64091099,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28306,FAYETTEVILLE,NC,PRIMARY,16125,30542,559414772,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47043,VEVAY,IN,PRIMARY,2340,4158,66898429,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20143,CATHARPIN,VA,PRIMARY,577,1029,40476976,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67430,CAWKER CITY,KS,PRIMARY,348,587,7166001,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2032,EAST WALPOLE,MA,PRIMARY,2082,3705,133930220,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30309,ATLANTA,GA,PRIMARY,12336,15857,923521797,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14111,NORTH COLLINS,NY,PRIMARY,1577,2725,52890844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66285,LENEXA,KS,PRIMARY,257,396,10337807,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29438,EDISTO ISLAND,SC,PRIMARY,1193,1976,31652508,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61125,ROCKFORD,IL,PRIMARY,270,408,8427726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85007,PHOENIX,AZ,PRIMARY,4675,7993,154328707,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52223,DELHI,IA,PRIMARY,669,1184,20926327,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10583,SCARSDALE,NY,PRIMARY,19793,36163,528333370,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65582,VIENNA,MO,PRIMARY,938,1725,25775009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96091,TRINITY CENTER,CA,PRIMARY,293,501,3841609,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42078,SALEM,KY,PRIMARY,763,1447,21640714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50841,CORNING,IA,PRIMARY,1374,2411,34696804,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13224,SYRACUSE,NY,PRIMARY,4167,6872,162086803,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53153,NORTH PRAIRIE,WI,PRIMARY,1231,2302,68430870,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36540,GAINESTOWN,AL,PRIMARY,379,735,9178197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33936,LEHIGH ACRES,FL,PRIMARY,8583,14941,214227701,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68015,CEDAR BLUFFS,NE,PRIMARY,503,910,15234873,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73061,MORRISON,OK,PRIMARY,647,1261,23624297,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79313,ANTON,TX,PRIMARY,719,1332,19098172,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63548,LANCASTER,MO,PRIMARY,575,1036,12854714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92101,SAN DIEGO,CA,PRIMARY,16788,21606,941833907,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19055,LEVITTOWN,PA,PRIMARY,6888,11766,258651472,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61853,MAHOMET,IL,PRIMARY,5916,11293,298336301,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66524,OVERBROOK,KS,PRIMARY,1209,2211,43794745,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28411,WILMINGTON,NC,PRIMARY,14204,25564,625581389,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47858,LEWIS,IN,PRIMARY,304,575,10038558,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+52801,DAVENPORT,IA,PRIMARY,327,400,7177890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35055,CULLMAN,AL,PRIMARY,7814,14351,246815105,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62629,CHATHAM,IL,PRIMARY,6169,11461,292298197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37391,TURTLETOWN,TN,PRIMARY,534,1026,12386548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56484,WALKER,MN,PRIMARY,1918,3282,48746056,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50597,WEST BEND,IA,PRIMARY,683,1186,15999293,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60169,HOFFMAN ESTATES,IL,PRIMARY,15505,27194,658665469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43571,WHITEHOUSE,OH,PRIMARY,3392,6054,176743842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28368,OLIVIA,NC,PRIMARY,379,660,10336533,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67056,HALSTEAD,KS,PRIMARY,1385,2539,47720990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66012,BONNER SPRINGS,KS,PRIMARY,5190,9439,228149876,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58621,BEACH,ND,PRIMARY,673,1119,17569061,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24266,LEBANON,VA,PRIMARY,4042,7466,135873255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78234,SAN ANTONIO,TX,PRIMARY,1673,3432,69135921,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70116,NEW ORLEANS,LA,PRIMARY,4612,6580,129465605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63071,RICHWOODS,MO,PRIMARY,425,805,11093285,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98359,OLALLA,WA,PRIMARY,2273,4171,94563736,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48108,ANN ARBOR,MI,PRIMARY,12492,21112,666763111,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45761,MILLFIELD,OH,PRIMARY,703,1225,16837663,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54891,WASHBURN,WI,PRIMARY,1646,2834,52026716,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49920,CRYSTAL FALLS,MI,PRIMARY,1921,3287,46237483,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90755,SIGNAL HILL,CA,PRIMARY,5205,8468,258295992,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7013,CLIFTON,NJ,PRIMARY,14153,23975,704167193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10517,CROMPOND,NY,PRIMARY,454,809,25296212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3854,NEW CASTLE,NH,PRIMARY,590,959,45088962,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68959,MINDEN,NE,PRIMARY,1813,3260,52873387,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33918,NORTH FORT MYERS,FL,PRIMARY,631,979,13641891,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1522,JEFFERSON,MA,PRIMARY,1672,2984,93338120,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29449,HOLLYWOOD,SC,PRIMARY,3509,6131,112055204,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31824,PRESTON,GA,PRIMARY,704,1335,18183190,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72211,LITTLE ROCK,AR,PRIMARY,10054,17089,502041111,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32327,CRAWFORDVILLE,FL,PRIMARY,9839,17961,351500595,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13691,THERESA,NY,PRIMARY,1300,2395,40555190,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26534,GRANVILLE,WV,PRIMARY,466,770,10349617,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86047,WINSLOW,AZ,PRIMARY,4897,9122,144717338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55430,MINNEAPOLIS,MN,PRIMARY,9978,16574,312911213,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +36526,DAPHNE,AL,PRIMARY,12122,22153,564796697,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92504,RIVERSIDE,CA,PRIMARY,20774,37665,723697102,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90295,MARINA DEL REY,CA,PRIMARY,875,1166,40862193,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59104,BILLINGS,MT,PRIMARY,562,872,19440224,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28612,CONNELLYS SPRINGS,NC,PRIMARY,4354,8122,125108281,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78550,HARLINGEN,TX,PRIMARY,19279,35476,552425626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49233,CEMENT CITY,MI,PRIMARY,1257,2256,42359689,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42729,CUB RUN,KY,PRIMARY,507,991,10849515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32771,SANFORD,FL,PRIMARY,21728,38014,858548338,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65640,DUNNEGAN,MO,PRIMARY,322,600,6277740,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45389,CHRISTIANSBURG,OH,PRIMARY,282,481,7842436,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44609,BELOIT,OH,PRIMARY,1779,3143,56381065,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49047,DOWAGIAC,MI,PRIMARY,6530,11320,175759009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61101,ROCKFORD,IL,PRIMARY,9028,15650,216110837,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78553,HARLINGEN,TX,PRIMARY,1157,2013,30328230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98932,GRANGER,WA,PRIMARY,1834,3725,46775377,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67853,INGALLS,KS,PRIMARY,385,724,11320016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4428,EDDINGTON,ME,PRIMARY,1426,2514,47557054,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99521,ANCHORAGE,AK,PRIMARY,902,1389,35257804,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16827,BOALSBURG,PA,PRIMARY,2269,3952,103160487,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56349,LOWRY,MN,PRIMARY,362,664,11998989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63461,PALMYRA,MO,PRIMARY,2678,4885,83359101,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3842,HAMPTON,NH,PRIMARY,7982,13085,395486874,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73773,WAUKOMIS,OK,PRIMARY,771,1414,23811775,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34607,SPRING HILL,FL,PRIMARY,3580,6196,108320693,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47260,MEDORA,IN,PRIMARY,886,1612,24079158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62629,CHATHAM,IL,PRIMARY,6169,11461,292298197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31909,COLUMBUS,GA,PRIMARY,15634,27967,596204109,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76104,FORT WORTH,TX,PRIMARY,6058,10534,142325465,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71069,RODESSA,LA,PRIMARY,271,509,7284560,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25181,SETH,WV,PRIMARY,587,1152,20917456,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91763,MONTCLAIR,CA,PRIMARY,14454,27063,453244725,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99740,FORT YUKON,AK,PRIMARY,278,420,6385108,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16040,HILLIARDS,PA,PRIMARY,438,781,11862271,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +83350,RUPERT,ID,PRIMARY,4962,9622,134737047,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60505,AURORA,IL,PRIMARY,23662,44855,636961078,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +76903,SAN ANGELO,TX,PRIMARY,13472,23126,320563090,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29583,PAMPLICO,SC,PRIMARY,2208,4058,59088838,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95472,SEBASTOPOL,CA,PRIMARY,13081,21680,493650051,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29506,FLORENCE,SC,PRIMARY,7824,13667,190085130,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71462,NOBLE,LA,PRIMARY,541,1049,17675682,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28341,FAISON,NC,PRIMARY,1756,3329,45375995,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38114,MEMPHIS,TN,PRIMARY,12739,21150,233344846,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46184,WHITELAND,IN,PRIMARY,5418,10100,215147469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90633,LA HABRA,CA,PRIMARY,301,508,8864607,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +63051,HOUSE SPRINGS,MO,PRIMARY,6579,11892,245384916,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35034,BRENT,AL,PRIMARY,1689,3090,43472736,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98021,BOTHELL,WA,PRIMARY,12362,22499,747557806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +96837,HONOLULU,HI,PRIMARY,506,772,15103378,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +66010,BLUE MOUND,KS,PRIMARY,275,489,6519930,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62881,SALEM,IL,PRIMARY,5311,9380,156322898,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26187,WILLIAMSTOWN,WV,PRIMARY,2791,5122,106783094,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75972,SAN AUGUSTINE,TX,PRIMARY,2694,4840,65068750,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79029,DUMAS,TX,PRIMARY,6753,12948,231494323,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17737,HUGHESVILLE,PA,PRIMARY,3052,5359,95918965,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95306,CATHEYS VALLEY,CA,PRIMARY,414,781,14170835,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38034,FRIENDSHIP,TN,PRIMARY,1132,2142,31157716,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49454,SCOTTVILLE,MI,PRIMARY,2108,3847,56437606,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67428,CANTON,KS,PRIMARY,706,1317,24683170,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20817,BETHESDA,MD,PRIMARY,17115,31025,1785078706,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32176,ORMOND BEACH,FL,PRIMARY,7350,11627,220641577,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30152,KENNESAW,GA,PRIMARY,17602,33285,1107231532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32720,DELAND,FL,PRIMARY,12134,21124,353565495,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5487,STARKSBORO,VT,PRIMARY,763,1323,25310357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21787,TANEYTOWN,MD,PRIMARY,5024,9033,195175957,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98908,YAKIMA,WA,PRIMARY,16300,29357,648522174,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72858,POTTSVILLE,AR,PRIMARY,1374,2692,43381017,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77670,VIDOR,TX,PRIMARY,867,1518,26748294,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +49635,FRANKFORT,MI,PRIMARY,1747,2913,34972330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +68517,LINCOLN,NE,PRIMARY,251,476,11422901,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38326,COUNCE,TN,PRIMARY,1036,1859,33972097,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+71747,HUTTIG,AR,PRIMARY,399,742,11230738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76471,RISING STAR,TX,PRIMARY,673,1221,15839607,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+83832,GENESEE,ID,PRIMARY,688,1314,25534920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43767,NORWICH,OH,PRIMARY,705,1296,24740458,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43545,NAPOLEON,OH,PRIMARY,7261,12429,231187111,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42452,ROBARDS,KY,PRIMARY,1047,2032,41156077,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53594,WATERLOO,WI,PRIMARY,2660,4736,97656259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35078,HARPERSVILLE,AL,PRIMARY,958,1768,30258698,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13034,CAYUGA,NY,PRIMARY,834,1505,30953085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54812,BARRON,WI,PRIMARY,2764,4756,73105539,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+75054,GRAND PRAIRIE,TX,PRIMARY,3197,6470,223864571,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56304,SAINT CLOUD,MN,PRIMARY,6772,10660,209766787,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43023,GRANVILLE,OH,PRIMARY,5481,10048,331579502,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33685,TAMPA,FL,PRIMARY,698,1095,17808652,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71424,ELMER,LA,PRIMARY,502,984,18205660,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25621,GILBERT,WV,PRIMARY,1036,2049,38283828,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77856,FRANKLIN,TX,PRIMARY,1882,3468,60191662,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38618,COLDWATER,MS,PRIMARY,5009,9294,153034488,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13652,HERMON,NY,PRIMARY,740,1353,20921242,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55612,LUTSEN,MN,PRIMARY,282,455,6161213,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45882,ROCKFORD,OH,PRIMARY,1517,2589,43053646,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30452,REGISTER,GA,PRIMARY,674,1255,18664920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47401,BLOOMINGTON,IN,PRIMARY,16842,27740,731808245,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32346,PANACEA,FL,PRIMARY,821,1408,20696358,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47620,MOUNT VERNON,IN,PRIMARY,6328,11552,248647326,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23234,RICHMOND,VA,PRIMARY,18687,31921,562632911,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+79604,ABILENE,TX,PRIMARY,727,1206,22681587,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13116,MINOA,NY,PRIMARY,1711,2992,65411187,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+63945,HARVIELL,MO,PRIMARY,492,934,12523376,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32714,ALTAMONTE SPRINGS,FL,PRIMARY,17149,27758,602956990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66724,CHEROKEE,KS,PRIMARY,485,882,12946467,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18214,BARNESVILLE,PA,PRIMARY,1105,1939,37504004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+81401,MONTROSE,CO,PRIMARY,11102,19993,329361407,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27883,STANTONSBURG,NC,PRIMARY,1494,2693,40520695,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38174,MEMPHIS,TN,PRIMARY,342,486,10914282,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53968,WONEWOC,WI,PRIMARY,1081,1895,28801628,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65063,NEW BLOOMFIELD,MO,PRIMARY,1543,2892,56761511,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55373,ROCKFORD,MN,PRIMARY,2702,4831,130223746,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62952,JONESBORO,IL,PRIMARY,1513,2746,42934076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60516,DOWNERS GROVE,IL,PRIMARY,15386,26828,848619357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62547,MOUNT AUBURN,IL,PRIMARY,379,687,12554192,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18414,DALTON,PA,PRIMARY,2787,4924,125431491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54006,CUSHING,WI,PRIMARY,397,695,12805598,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7605,LEONIA,NJ,PRIMARY,4156,7589,259754085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+22971,SHIPMAN,VA,PRIMARY,732,1291,22138662,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+58207,GRAND FORKS,ND,PRIMARY,273,285,4275788,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47620,MOUNT VERNON,IN,PRIMARY,6328,11552,248647326,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+73084,SPENCER,OK,PRIMARY,2887,4808,63783363,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33074,POMPANO BEACH,FL,PRIMARY,385,596,10290584,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67002,ANDOVER,KS,PRIMARY,5786,11299,363402717,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49272,PLEASANT LAKE,MI,PRIMARY,1197,2196,44714765,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11947,JAMESPORT,NY,PRIMARY,741,1261,37830806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70441,GREENSBURG,LA,PRIMARY,2122,3865,53481936,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+79915,EL PASO,TX,PRIMARY,16044,29001,316247822,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2462,NEWTON LOWER FALLS,MA,PRIMARY,689,1184,50636755,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48244,DETROIT,MI,PRIMARY,336,504,10093547,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74641,KAW CITY,OK,PRIMARY,319,563,9379117,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+21054,GAMBRILLS,MD,PRIMARY,5092,9367,354966758,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33335,FORT LAUDERDALE,FL,PRIMARY,351,504,13145133,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54742,FALL CREEK,WI,PRIMARY,2196,4070,85292045,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92607,LAGUNA NIGUEL,CA,PRIMARY,510,795,21622264,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+98065,SNOQUALMIE,WA,PRIMARY,5184,10415,442377417,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80514,DACONO,CO,PRIMARY,1762,3217,67359626,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+85935,PINETOP,AZ,PRIMARY,2142,3965,67863773,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52233,HIAWATHA,IA,PRIMARY,3417,5715,133754293,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12833,GREENFIELD CENTER,NY,PRIMARY,2173,3878,97388018,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+10459,BRONX,NY,PRIMARY,17977,30434,414021653,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92607,LAGUNA NIGUEL,CA,PRIMARY,510,795,21622264,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36854,VALLEY,AL,PRIMARY,6440,11676,184575673,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11765,MILL NECK,NY,PRIMARY,371,609,53923307,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3455,MARLBOROUGH,NH,PRIMARY,1140,1895,42897804,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+57314,ARTESIAN,SD,PRIMARY,320,545,6148004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39074,FOREST,MS,PRIMARY,5837,10722,147109672,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41174,SOUTH PORTSMOUTH,KY,PRIMARY,380,733,11769032,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70126,NEW ORLEANS,LA,PRIMARY,8133,14185,198343652,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33076,POMPANO BEACH,FL,PRIMARY,12888,25322,947666207,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+58602,DICKINSON,ND,PRIMARY,415,663,15083214,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1902,LYNN,MA,PRIMARY,18627,30268,565476884,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62351,MENDON,IL,PRIMARY,879,1613,28403364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+20036,WASHINGTON,DC,PRIMARY,3854,4479,276290738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95645,KNIGHTS LANDING,CA,PRIMARY,662,1285,20033406,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95524,BAYSIDE,CA,PRIMARY,1038,1740,32200527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71264,OAK RIDGE,LA,PRIMARY,400,743,12612818,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+4429,HOLDEN,ME,PRIMARY,2427,4302,104362764,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46507,BRISTOL,IN,PRIMARY,4426,8126,185424431,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90012,LOS ANGELES,CA,PRIMARY,8834,13839,288859252,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23093,LOUISA,VA,PRIMARY,5577,9999,208591582,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46268,INDIANAPOLIS,IN,PRIMARY,12562,20516,460320675,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16866,PHILIPSBURG,PA,PRIMARY,3822,6561,115339961,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80653,WELDONA,CO,PRIMARY,340,626,10585279,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97333,CORVALLIS,OR,PRIMARY,8497,13876,300513923,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61841,FAIRMOUNT,IL,PRIMARY,663,1218,22931130,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77836,CALDWELL,TX,PRIMARY,4813,8783,156682163,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+63748,FROHNA,MO,PRIMARY,450,811,13386812,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29368,MAYO,SC,PRIMARY,413,708,10844329,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6119,WEST HARTFORD,CT,PRIMARY,7507,12133,401473695,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61480,STRONGHURST,IL,PRIMARY,641,1149,18219366,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35473,NORTHPORT,AL,PRIMARY,6354,11384,243262138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37873,SURGOINSVILLE,TN,PRIMARY,1736,3242,46289333,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78538,EDCOUCH,TX,PRIMARY,3415,6689,70887559,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70514,BALDWIN,LA,PRIMARY,1255,2287,37233044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+89113,LAS VEGAS,NV,PRIMARY,11226,18126,578215900,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+91767,POMONA,CA,PRIMARY,18442,33720,578550841,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26590,WANA,WV,PRIMARY,297,574,10443151,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36867,PHENIX CITY,AL,PRIMARY,8405,14901,255624855,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54733,DALLAS,WI,PRIMARY,623,1149,18162095,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48210,DETROIT,MI,PRIMARY,9780,18392,173358285,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6420,SALEM,CT,PRIMARY,2052,3806,128811331,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93505,CALIFORNIA CITY,CA,PRIMARY,3792,7123,143873013,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52157,MC GREGOR,IA,PRIMARY,856,1458,24791324,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46217,INDIANAPOLIS,IN,PRIMARY,14727,25860,617589207,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3284,SPRINGFIELD,NH,PRIMARY,377,676,12629305,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96740,KAILUA KONA,HI,PRIMARY,11334,18960,383807736,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56169,RUSSELL,MN,PRIMARY,335,611,11094716,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+57528,COLOME,SD,PRIMARY,341,644,6658527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+17584,WILLOW STREET,PA,PRIMARY,4628,8112,153995957,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37409,CHATTANOOGA,TN,PRIMARY,1415,2336,39471123,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+75432,COOPER,TX,PRIMARY,1357,2477,40152215,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96001,REDDING,CA,PRIMARY,13491,23744,469950362,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44851,NEW LONDON,OH,PRIMARY,2564,4542,77557285,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+94130,SAN FRANCISCO,CA,PRIMARY,966,1387,33246327,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95367,RIVERBANK,CA,PRIMARY,9078,17864,348987321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+94546,CASTRO VALLEY,CA,PRIMARY,20616,35351,987939047,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36575,SEMMES,AL,PRIMARY,7673,14723,277891064,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61420,BLANDINSVILLE,IL,PRIMARY,537,961,14340835,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77423,BROOKSHIRE,TX,PRIMARY,3400,6299,114609605,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6450,MERIDEN,CT,PRIMARY,17977,29435,718014481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64640,GALLATIN,MO,PRIMARY,1374,2538,37555781,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61778,WAYNESVILLE,IL,PRIMARY,330,607,10800525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71111,BOSSIER CITY,LA,PRIMARY,15934,28820,640130529,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77578,MANVEL,TX,PRIMARY,5615,10680,355621459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38847,GOLDEN,MS,PRIMARY,1326,2592,37582287,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26624,GASSAWAY,WV,PRIMARY,1199,2151,36421050,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53034,HUSTISFORD,WI,PRIMARY,964,1653,35215160,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96091,TRINITY CENTER,CA,PRIMARY,293,501,3841609,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42345,GREENVILLE,KY,PRIMARY,4432,8336,140443503,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80234,DENVER,CO,PRIMARY,12131,20539,574285443,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64117,KANSAS CITY,MO,PRIMARY,6677,11001,198354416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33538,LAKE PANASOFFKEE,FL,PRIMARY,1828,3199,41467056,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96055,LOS MOLINOS,CA,PRIMARY,1466,2732,40104959,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50471,RUDD,IA,PRIMARY,345,614,9547507,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3218,BARNSTEAD,NH,PRIMARY,592,1040,22542745,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40855,LYNCH,KY,PRIMARY,343,644,9364593,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+82054,CARPENTER,WY,PRIMARY,351,668,12280782,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33852,LAKE PLACID,FL,PRIMARY,7633,13363,156977577,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23225,RICHMOND,VA,PRIMARY,18633,29120,655538330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16501,ERIE,PA,PRIMARY,614,778,6263221,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95036,MILPITAS,CA,PRIMARY,824,1343,40689789,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62684,SHERMAN,IL,PRIMARY,2387,4514,115618522,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33410,PALM BEACH GARDENS,FL,PRIMARY,19757,29752,824014973,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68376,HUMBOLDT,NE,PRIMARY,681,1199,15668981,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48105,ANN ARBOR,MI,PRIMARY,15149,25072,901733416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27217,BURLINGTON,NC,PRIMARY,14999,27330,403486230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44117,EUCLID,OH,PRIMARY,4773,7440,134687685,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97114,DAYTON,OR,PRIMARY,2052,3869,68317318,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14813,BELMONT,NY,PRIMARY,1046,1857,30299947,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50130,JEWELL,IA,PRIMARY,786,1482,29802717,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23050,DUTTON,VA,PRIMARY,346,640,13112053,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+19035,GLADWYNE,PA,PRIMARY,2151,3761,353254555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1902,LYNN,MA,PRIMARY,18627,30268,565476884,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97202,PORTLAND,OR,PRIMARY,19428,29321,782621168,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30271,NEWNAN,GA,PRIMARY,497,834,17503903,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95311,COULTERVILLE,CA,PRIMARY,781,1402,19322366,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39096,LORMAN,MS,PRIMARY,860,1507,20656446,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2767,RAYNHAM,MA,PRIMARY,6595,11607,343769637,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8021,CLEMENTON,NJ,PRIMARY,22635,37348,819258817,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54205,CASCO,WI,PRIMARY,1035,1910,36998005,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+89179,LAS VEGAS,NV,PRIMARY,1300,2329,72009836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+34433,DUNNELLON,FL,PRIMARY,2798,5014,69415228,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96049,REDDING,CA,PRIMARY,1324,2172,39128730,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55373,ROCKFORD,MN,PRIMARY,2702,4831,130223746,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77433,CYPRESS,TX,PRIMARY,18562,37962,1278558250,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62014,BUNKER HILL,IL,PRIMARY,1823,3375,62651385,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95374,STEVINSON,CA,PRIMARY,685,1329,19206707,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8724,BRICK,NJ,PRIMARY,21871,37268,994471914,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50402,MASON CITY,IA,PRIMARY,315,455,8193863,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77020,HOUSTON,TX,PRIMARY,9757,17899,242110745,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27809,BATTLEBORO,NC,PRIMARY,2203,4020,74212407,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45760,MIDDLEPORT,OH,PRIMARY,1344,2438,35409296,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92678,TRABUCO CANYON,CA,PRIMARY,330,561,16470274,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23150,SANDSTON,VA,PRIMARY,6194,10558,234960055,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97361,MONMOUTH,OR,PRIMARY,4168,7320,133670416,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40209,LOUISVILLE,KY,PRIMARY,297,475,6930788,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2047,HUMAROCK,MA,PRIMARY,377,595,13510944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12009,ALTAMONT,NY,PRIMARY,3502,6187,170041565,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54135,KESHENA,WI,PRIMARY,1234,2191,27631487,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+4543,DAMARISCOTTA,ME,PRIMARY,1234,1996,28687557,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42755,LEITCHFIELD,KY,PRIMARY,311,563,8955080,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45018,FAIRFIELD,OH,PRIMARY,280,434,8702766,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27818,COMO,NC,PRIMARY,573,1024,16188667,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64096,WAVERLY,MO,PRIMARY,485,894,14868497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14204,BUFFALO,NY,PRIMARY,3294,5302,73479105,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49046,DELTON,MI,PRIMARY,3282,5973,109608648,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61535,GROVELAND,IL,PRIMARY,801,1536,44292423,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74002,BARNSDALL,OK,PRIMARY,915,1708,28091450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72854,OZONE,AR,PRIMARY,259,488,5550503,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13159,TULLY,NY,PRIMARY,2516,4590,110806140,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+79313,ANTON,TX,PRIMARY,719,1332,19098172,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+84663,SPRINGVILLE,UT,PRIMARY,10892,22010,417211220,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41005,BURLINGTON,KY,PRIMARY,9904,18294,445749587,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+98374,PUYALLUP,WA,PRIMARY,16053,29998,774870786,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66604,TOPEKA,KS,PRIMARY,11875,18933,358938885,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30293,WOODBURY,GA,PRIMARY,1289,2377,32501976,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33243,MIAMI,FL,PRIMARY,318,461,8904012,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41073,BELLEVUE,KY,PRIMARY,2990,4685,100072674,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38948,OAKLAND,MS,PRIMARY,877,1576,20403335,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77650,PORT BOLIVAR,TX,PRIMARY,674,1139,17505164,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29856,WINDSOR,SC,PRIMARY,1075,2051,28668479,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64114,KANSAS CITY,MO,PRIMARY,13330,19663,509733149,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76238,ERA,TX,PRIMARY,252,474,9302032,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14466,HEMLOCK,NY,PRIMARY,873,1557,29993583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18431,HONESDALE,PA,PRIMARY,6306,10706,185241782,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15027,CONWAY,PA,PRIMARY,1121,1918,34815855,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1370,SHELBURNE FALLS,MA,PRIMARY,2105,3439,65890148,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46204,INDIANAPOLIS,IN,PRIMARY,2151,2623,126173588,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37771,LENOIR CITY,TN,PRIMARY,6751,12281,209398463,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12431,FREEHOLD,NY,PRIMARY,683,1188,23087267,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53214,MILWAUKEE,WI,PRIMARY,17812,27867,566023689,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46072,TIPTON,IN,PRIMARY,4541,7955,144866056,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30634,DEWY ROSE,GA,PRIMARY,1016,1904,25586062,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95977,SMARTSVILLE,CA,PRIMARY,619,1146,21308751,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29596,WALLACE,SC,PRIMARY,1015,1860,26227201,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35981,IDER,AL,PRIMARY,854,1689,24829511,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42086,WEST PADUCAH,KY,PRIMARY,1863,3509,69783114,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93730,FRESNO,CA,PRIMARY,3993,7681,311497250,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+4653,BASS HARBOR,ME,PRIMARY,288,473,7116735,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53021,FREDONIA,WI,PRIMARY,2311,4219,109776583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95073,SOQUEL,CA,PRIMARY,5537,9169,259513233,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3605,LEMPSTER,NH,PRIMARY,486,837,15816778,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65655,GAINESVILLE,MO,PRIMARY,1265,2279,27307412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56262,MILAN,MN,PRIMARY,341,605,9198202,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61528,EDWARDS,IL,PRIMARY,1220,2369,85914917,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16218,COWANSVILLE,PA,PRIMARY,529,936,16726545,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8310,BUENA,NJ,PRIMARY,927,1546,30173830,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+94963,SAN GERONIMO,CA,PRIMARY,294,485,14591953,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13209,SYRACUSE,NY,PRIMARY,6817,11191,214056205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60005,ARLINGTON HEIGHTS,IL,PRIMARY,15155,25773,797251202,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7764,WEST LONG BRANCH,NJ,PRIMARY,3343,5838,175716375,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62061,MARINE,IL,PRIMARY,864,1517,34372344,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30175,TALKING ROCK,GA,PRIMARY,2311,4487,74264784,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44807,ATTICA,OH,PRIMARY,1170,2044,34240317,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97089,DAMASCUS,OR,PRIMARY,4882,9159,225703744,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72205,LITTLE ROCK,AR,PRIMARY,11104,17073,414729310,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6351,JEWETT CITY,CT,PRIMARY,7952,13771,339228486,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56671,REDLAKE,MN,PRIMARY,1204,2214,25934497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74446,OKAY,OK,PRIMARY,465,848,12840162,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8558,SKILLMAN,NJ,PRIMARY,3140,6048,468109768,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38859,NEW SITE,MS,PRIMARY,267,523,6792114,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92546,HEMET,CA,PRIMARY,675,1153,17248817,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+21851,POCOMOKE CITY,MD,PRIMARY,3441,5977,105001707,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32572,MILTON,FL,PRIMARY,498,845,15077019,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+17935,GIRARDVILLE,PA,PRIMARY,793,1285,20377016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30677,WATKINSVILLE,GA,PRIMARY,7320,14161,339000189,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12578,SALT POINT,NY,PRIMARY,1137,2043,60259098,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96729,HOOLEHUA,HI,PRIMARY,380,737,10614688,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70112,NEW ORLEANS,LA,PRIMARY,1543,2310,46228230,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53705,MADISON,WI,PRIMARY,12878,20296,613364030,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72315,BLYTHEVILLE,AR,PRIMARY,9088,16578,276679318,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49009,KALAMAZOO,MI,PRIMARY,18841,32845,898833232,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+20119,CATLETT,VA,PRIMARY,1725,3067,83160072,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6093,WEST SUFFIELD,CT,PRIMARY,1757,3130,98172513,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62479,WHEELER,IL,PRIMARY,428,762,10944585,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26711,CAPON BRIDGE,WV,PRIMARY,1191,2192,42904971,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45616,BLUE CREEK,OH,PRIMARY,518,967,12386710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39402,HATTIESBURG,MS,PRIMARY,15516,27793,678391743,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92661,NEWPORT BEACH,CA,PRIMARY,2023,2915,131354093,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7649,ORADELL,NJ,PRIMARY,3914,7355,369791979,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66614,TOPEKA,KS,PRIMARY,15992,27021,597331998,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+84083,WENDOVER,UT,PRIMARY,669,1265,16789224,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47577,SAINT MEINRAD,IN,PRIMARY,509,871,15383942,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+87901,TRUTH OR CONSEQUENCES,NM,PRIMARY,3001,4730,49361750,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78130,NEW BRAUNFELS,TX,PRIMARY,26382,47380,915619654,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61019,DAVIS,IL,PRIMARY,1668,3112,55956024,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44878,SHILOH,OH,PRIMARY,1270,2326,32591722,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50216,PANORA,IA,PRIMARY,1306,2424,55330227,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41055,MAYSLICK,KY,PRIMARY,761,1454,22078452,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46914,BUNKER HILL,IN,PRIMARY,1037,1787,29361166,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+10024,NEW YORK,NY,PRIMARY,31925,47261,1108944328,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5669,ROXBURY,VT,PRIMARY,268,462,6659703,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71325,CHENEYVILLE,LA,PRIMARY,490,905,11902789,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27804,ROCKY MOUNT,NC,PRIMARY,12981,22882,457893619,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43432,GRAYTOWN,OH,PRIMARY,743,1313,29499460,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30360,ATLANTA,GA,PRIMARY,6046,10858,263685354,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+19956,LAUREL,DE,PRIMARY,6858,12111,200021101,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27537,HENDERSON,NC,PRIMARY,9009,16584,260448570,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67074,LEON,KS,PRIMARY,802,1463,30617862,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67672,WAKEENEY,KS,PRIMARY,1260,2142,32841547,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56623,BAUDETTE,MN,PRIMARY,1395,2425,37453913,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43901,ADENA,OH,PRIMARY,971,1712,31350149,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+98855,TONASKET,WA,PRIMARY,2361,4299,55454229,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+99921,CRAIG,AK,PRIMARY,824,1367,25062656,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71676,WILMOT,AR,PRIMARY,305,549,5734747,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+94571,RIO VISTA,CA,PRIMARY,3913,6699,129395123,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68638,FULLERTON,NE,PRIMARY,788,1437,19517537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+87745,SAPELLO,NM,PRIMARY,264,484,6068769,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71107,SHREVEPORT,LA,PRIMARY,13265,23793,393082419,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67004,ARGONIA,KS,PRIMARY,381,705,11071808,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12009,ALTAMONT,NY,PRIMARY,3502,6187,170041565,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41002,AUGUSTA,KY,PRIMARY,1023,1898,30704824,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71474,SIMPSON,LA,PRIMARY,323,600,12646353,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40977,PINEVILLE,KY,PRIMARY,3058,5825,84322345,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64865,SENECA,MO,PRIMARY,2468,4624,70179487,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68450,TECUMSEH,NE,PRIMARY,1133,2029,31652377,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62803,HOYLETON,IL,PRIMARY,528,927,14921593,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55971,RUSHFORD,MN,PRIMARY,1418,2598,44984750,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62037,GRAFTON,IL,PRIMARY,929,1699,37991685,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46565,SHIPSHEWANA,IN,PRIMARY,3171,5947,86771510,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+10969,PINE ISLAND,NY,PRIMARY,623,1106,27734147,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6712,PROSPECT,CT,PRIMARY,4689,8427,245168855,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6354,MOOSUP,CT,PRIMARY,2619,4561,101944212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36467,OPP,AL,PRIMARY,4126,7660,110214704,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35808,HUNTSVILLE,AL,PRIMARY,388,896,17497561,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47449,NEWBERRY,IN,PRIMARY,273,491,8371430,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61764,PONTIAC,IL,PRIMARY,6262,10677,201511259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54151,NIAGARA,WI,PRIMARY,1778,3100,56375967,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+83840,KOOTENAI,ID,PRIMARY,300,537,7759110,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71854,TEXARKANA,AR,PRIMARY,14812,26511,456447690,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13668,NORWOOD,NY,PRIMARY,1467,2591,47386113,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+59018,CLYDE PARK,MT,PRIMARY,296,524,7916287,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49738,GRAYLING,MI,PRIMARY,4550,7941,107679728,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55368,NORWOOD YOUNG AMERICA,MN,PRIMARY,1115,1961,40306149,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30566,OAKWOOD,GA,PRIMARY,3720,6624,119917038,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+17101,HARRISBURG,PA,PRIMARY,1053,1215,26229154,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11434,JAMAICA,NY,PRIMARY,28175,44414,922046847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77426,CHAPPELL HILL,TX,PRIMARY,933,1654,41947435,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70005,METAIRIE,LA,PRIMARY,12020,19374,634099734,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1560,SOUTH GRAFTON,MA,PRIMARY,2022,3675,123503278,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66748,HUMBOLDT,KS,PRIMARY,1274,2288,37506705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26501,MORGANTOWN,WV,PRIMARY,8042,13220,266869676,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7073,EAST RUTHERFORD,NJ,PRIMARY,4707,7669,217842919,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77498,SUGAR LAND,TX,PRIMARY,13340,26061,686997827,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+21875,DELMAR,MD,PRIMARY,2981,5235,105266691,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+21719,CASCADE,MD,PRIMARY,757,1331,26938345,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60302,OAK PARK,IL,PRIMARY,16180,27186,1137923677,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95448,HEALDSBURG,CA,PRIMARY,8593,14838,343340891,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+85260,SCOTTSDALE,AZ,PRIMARY,18493,29865,1084111228,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+51241,LARCHWOOD,IA,PRIMARY,754,1436,25743107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+22841,MOUNT CRAWFORD,VA,PRIMARY,1360,2419,51697444,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72633,EVERTON,AR,PRIMARY,571,1126,15165496,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38504,ALLARDT,TN,PRIMARY,599,1138,14846343,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93955,SEASIDE,CA,PRIMARY,13223,24162,450948197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25827,CRAB ORCHARD,WV,PRIMARY,1386,2554,50613288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90404,SANTA MONICA,CA,PRIMARY,11003,16220,754476822,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12090,HOOSICK FALLS,NY,PRIMARY,2688,4771,92694191,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6085,UNIONVILLE,CT,PRIMARY,3456,6187,209701987,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+31566,WAYNESVILLE,GA,PRIMARY,1380,2610,35740098,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55722,COLERAINE,MN,PRIMARY,550,1002,17985299,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2163,BOSTON,MA,PRIMARY,630,723,46464955,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+89318,MC GILL,NV,PRIMARY,516,896,17746290,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29353,JONESVILLE,SC,PRIMARY,1818,3229,48951710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64423,BARNARD,MO,PRIMARY,345,639,11174112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+19468,ROYERSFORD,PA,PRIMARY,12835,22357,704775892,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72902,FORT SMITH,AR,PRIMARY,574,937,16107173,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1731,HANSCOM AFB,MA,PRIMARY,596,1343,28379331,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3302,CONCORD,NH,PRIMARY,764,1096,28178610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45885,SAINT MARYS,OH,PRIMARY,6389,10830,202765355,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5680,WOLCOTT,VT,PRIMARY,941,1662,28332837,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32115,DAYTONA BEACH,FL,PRIMARY,387,542,9855865,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56264,MINNEOTA,MN,PRIMARY,1020,1825,29359630,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38462,HOHENWALD,TN,PRIMARY,4129,7770,103755133,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78231,SAN ANTONIO,TX,PRIMARY,4066,7193,232799653,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+84157,SALT LAKE CITY,UT,PRIMARY,588,938,20472974,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67480,SOLOMON,KS,PRIMARY,854,1560,29373470,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39211,JACKSON,MS,PRIMARY,11198,18806,521231524,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92887,YORBA LINDA,CA,PRIMARY,9669,18153,823004537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76651,ITALY,TX,PRIMARY,1276,2328,43869221,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62889,TEXICO,IL,PRIMARY,399,759,11264705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78630,CEDAR PARK,TX,PRIMARY,804,1331,28792042,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43977,FLUSHING,OH,PRIMARY,973,1675,26181628,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47018,DILLSBORO,IN,PRIMARY,2085,3733,68085639,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+51551,MALVERN,IA,PRIMARY,816,1493,28969463,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8530,LAMBERTVILLE,NJ,PRIMARY,3931,6511,257879296,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+10988,UNIONVILLE,NY,PRIMARY,363,629,13783721,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2903,PROVIDENCE,RI,PRIMARY,3952,5160,156882227,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+81643,MESA,CO,PRIMARY,383,689,14428107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15541,FRIEDENS,PA,PRIMARY,1780,3264,56922657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+34134,BONITA SPRINGS,FL,PRIMARY,6923,11428,570760748,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38460,GOODSPRING,TN,PRIMARY,596,1118,17892699,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29620,ABBEVILLE,SC,PRIMARY,5554,10139,154549847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28310,FORT BRAGG,NC,PRIMARY,3260,3863,65016523,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64163,KANSAS CITY,MO,PRIMARY,360,564,12326442,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74940,HOWE,OK,PRIMARY,864,1680,26304688,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+75232,DALLAS,TX,PRIMARY,12851,22153,333730368,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+91203,GLENDALE,CA,PRIMARY,6453,10833,219540443,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62613,ATHENS,IL,PRIMARY,1877,3489,73003434,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6762,MIDDLEBURY,CT,PRIMARY,3760,6856,242736491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47987,VEEDERSBURG,IN,PRIMARY,1899,3447,56917481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92198,SAN DIEGO,CA,PRIMARY,629,1033,33996926,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+63956,PATTERSON,MO,PRIMARY,386,740,7950037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+85246,CHANDLER,AZ,PRIMARY,630,1016,26596817,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8098,WOODSTOWN,NJ,PRIMARY,4297,7744,208318084,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70451,NATALBANY,LA,PRIMARY,1123,2105,28543239,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49224,ALBION,MI,PRIMARY,5300,9318,142870046,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23027,CARTERSVILLE,VA,PRIMARY,586,1070,18181935,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72004,ALTHEIMER,AR,PRIMARY,657,1114,12143994,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62545,MECHANICSBURG,IL,PRIMARY,619,1115,22566283,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46356,LOWELL,IN,PRIMARY,8090,14904,346309874,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66951,KENSINGTON,KS,PRIMARY,364,650,7320184,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61125,ROCKFORD,IL,PRIMARY,270,408,8427726,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+20777,HIGHLAND,MD,PRIMARY,1595,3036,142205219,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78270,SAN ANTONIO,TX,PRIMARY,409,650,13395100,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+84534,MONTEZUMA CREEK,UT,PRIMARY,385,711,12790009,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62627,CHANDLERVILLE,IL,PRIMARY,474,855,13679882,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47978,RENSSELAER,IN,PRIMARY,5236,9430,167397542,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32038,FORT WHITE,FL,PRIMARY,3740,6648,106072477,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55428,MINNEAPOLIS,MN,PRIMARY,14155,23451,491549095,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44004,ASHTABULA,OH,PRIMARY,14605,24745,405089505,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43065,POWELL,OH,PRIMARY,17969,33749,1406747373,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15762,NICKTOWN,PA,PRIMARY,454,835,15653513,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32550,MIRAMAR BEACH,FL,PRIMARY,3548,5708,170514509,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35044,CHILDERSBURG,AL,PRIMARY,3134,5729,93021157,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37016,AUBURNTOWN,TN,PRIMARY,392,714,12192821,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74332,BIG CABIN,OK,PRIMARY,765,1478,23230395,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62441,MARSHALL,IL,PRIMARY,3465,6204,110639963,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90274,PALOS VERDES PENINSULA,CA,PRIMARY,13159,24008,1404879976,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60550,SHABBONA,IL,PRIMARY,670,1179,22598110,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65452,CROCKER,MO,PRIMARY,1352,2524,36433514,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+81521,FRUITA,CO,PRIMARY,6740,12464,276339638,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78578,PORT ISABEL,TX,PRIMARY,4126,7482,96921761,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56701,THIEF RIVER FALLS,MN,PRIMARY,6163,10548,210359029,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14042,DELEVAN,NY,PRIMARY,1718,3060,50164400,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6807,COS COB,CT,PRIMARY,3444,6107,444794403,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43115,CLARKSBURG,OH,PRIMARY,567,1035,17613890,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46016,ANDERSON,IN,PRIMARY,7658,12898,138099333,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80513,BERTHOUD,CO,PRIMARY,5057,9316,244197618,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35401,TUSCALOOSA,AL,PRIMARY,10214,16255,215081021,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35452,COKER,AL,PRIMARY,1586,3077,57684991,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+31532,DENTON,GA,PRIMARY,297,573,8045990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1876,TEWKSBURY,MA,PRIMARY,14632,25645,790024913,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92308,APPLE VALLEY,CA,PRIMARY,14023,26211,459646224,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40340,NICHOLASVILLE,KY,PRIMARY,423,726,10599725,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45414,DAYTON,OH,PRIMARY,10415,17442,337111812,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11238,BROOKLYN,NY,PRIMARY,26156,37151,1236922826,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49424,HOLLAND,MI,PRIMARY,20517,37715,817216179,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+67740,HOXIE,KS,PRIMARY,903,1606,24887087,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11693,FAR ROCKAWAY,NY,PRIMARY,5423,8984,209039482,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52574,MYSTIC,IA,PRIMARY,367,664,9625712,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27943,HATTERAS,NC,PRIMARY,398,648,9853980,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62448,NEWTON,IL,PRIMARY,2903,5182,84135218,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95052,SANTA CLARA,CA,PRIMARY,389,580,20793149,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44413,EAST PALESTINE,OH,PRIMARY,3479,5943,100730827,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28344,GODWIN,NC,PRIMARY,1197,2305,35479609,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38017,COLLIERVILLE,TN,PRIMARY,21449,42115,1560357984,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74362,PRYOR,OK,PRIMARY,663,1197,25116058,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95367,RIVERBANK,CA,PRIMARY,9078,17864,348987321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5763,PITTSFORD,VT,PRIMARY,1415,2481,55269375,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28739,HENDERSONVILLE,NC,PRIMARY,8327,14540,238609610,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13069,FULTON,NY,PRIMARY,11158,19739,357501522,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28515,BAYBORO,NC,PRIMARY,885,1543,20752574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49410,FOUNTAIN,MI,PRIMARY,777,1366,17997965,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2667,WELLFLEET,MA,PRIMARY,1406,2145,34818321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38614,CLARKSDALE,MS,PRIMARY,7768,14062,188124583,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78935,ALLEYTON,TX,PRIMARY,368,689,15589470,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29547,HAMER,SC,PRIMARY,1137,2092,29450259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+88048,MESQUITE,NM,PRIMARY,1742,3476,36761875,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90640,MONTEBELLO,CA,PRIMARY,26228,46577,863251749,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70639,EVANS,LA,PRIMARY,251,479,8164089,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54209,EGG HARBOR,WI,PRIMARY,748,1270,17683706,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60434,JOLIET,IL,PRIMARY,771,1213,22151646,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48653,ROSCOMMON,MI,PRIMARY,4473,7846,103814357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8091,WEST BERLIN,NJ,PRIMARY,2694,4550,102854475,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7960,MORRISTOWN,NJ,PRIMARY,20272,33647,1862771371,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80134,PARKER,CO,PRIMARY,24453,47361,1779414950,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13202,SYRACUSE,NY,PRIMARY,1676,2520,41928158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+31126,ATLANTA,GA,PRIMARY,494,563,7143345,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38083,MILLINGTON,TN,PRIMARY,324,539,11173852,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+17316,EAST BERLIN,PA,PRIMARY,4009,7396,166843100,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70057,HAHNVILLE,LA,PRIMARY,1862,3323,71163076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43611,TOLEDO,OH,PRIMARY,9019,14938,275006057,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26801,BAKER,WV,PRIMARY,520,933,16143223,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33957,SANIBEL,FL,PRIMARY,3247,5478,171898751,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46156,MILROY,IN,PRIMARY,642,1208,18163354,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72101,MC CRORY,AR,PRIMARY,1356,2552,35363360,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+24541,DANVILLE,VA,PRIMARY,12241,21053,352550953,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38135,MEMPHIS,TN,PRIMARY,13502,24884,624309598,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13655,HOGANSBURG,NY,PRIMARY,1086,2150,32842598,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54456,NEILLSVILLE,WI,PRIMARY,2925,5071,82826559,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23847,EMPORIA,VA,PRIMARY,5915,10290,161830570,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80477,STEAMBOAT SPRINGS,CO,PRIMARY,4376,6952,196199680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48237,OAK PARK,MI,PRIMARY,14384,23430,432020412,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28111,MONROE,NC,PRIMARY,829,1460,26365462,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+24087,ELLISTON,VA,PRIMARY,1733,3135,54778025,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48335,FARMINGTON,MI,PRIMARY,11621,19642,591500096,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37301,ALTAMONT,TN,PRIMARY,615,1213,11973497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56542,FOSSTON,MN,PRIMARY,1322,2314,36056167,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7021,ESSEX FELLS,NJ,PRIMARY,1091,2007,166956457,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74017,CLAREMORE,OK,PRIMARY,10981,20237,395208643,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+51234,BOYDEN,IA,PRIMARY,561,1112,18543957,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48112,BELLEVILLE,MI,PRIMARY,387,607,13600635,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+4463,MILO,ME,PRIMARY,1146,2019,27029236,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5047,HARTFORD,VT,PRIMARY,445,698,14200608,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74833,CASTLE,OK,PRIMARY,274,525,7439989,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+80517,ESTES PARK,CO,PRIMARY,5115,8636,146219457,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18854,WYSOX,PA,PRIMARY,766,1298,20466138,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36551,LOXLEY,AL,PRIMARY,3314,6160,111072944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97429,DAYS CREEK,OR,PRIMARY,317,575,7345532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27011,BOONVILLE,NC,PRIMARY,2237,4153,63701740,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76226,ARGYLE,TX,PRIMARY,7253,14869,655673789,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16509,ERIE,PA,PRIMARY,13751,23284,496141340,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52809,DAVENPORT,IA,PRIMARY,284,433,6242496,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44444,NEWTON FALLS,OH,PRIMARY,5199,8868,150414836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30038,LITHONIA,GA,PRIMARY,16132,27454,560542732,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50247,STATE CENTER,IA,PRIMARY,1063,1995,38420148,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77662,VIDOR,TX,PRIMARY,10480,19641,375640152,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64688,WHEELING,MO,PRIMARY,251,465,5613218,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+87004,BERNALILLO,NM,PRIMARY,4742,8129,129793741,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3844,HAMPTON FALLS,NH,PRIMARY,1188,2143,81832537,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66748,HUMBOLDT,KS,PRIMARY,1274,2288,37506705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+19442,KIMBERTON,PA,PRIMARY,585,1001,33030996,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23227,RICHMOND,VA,PRIMARY,12451,19421,412004706,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7726,ENGLISHTOWN,NJ,PRIMARY,21395,39570,1469011742,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27051,WALKERTOWN,NC,PRIMARY,3465,6248,106779805,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71357,NEWELLTON,LA,PRIMARY,816,1496,20967676,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15224,PITTSBURGH,PA,PRIMARY,5114,7053,129720383,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+51529,DUNLAP,IA,PRIMARY,793,1409,24448432,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45769,POMEROY,OH,PRIMARY,2613,4757,72556319,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11232,BROOKLYN,NY,PRIMARY,10584,17338,294496128,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25502,APPLE GROVE,WV,PRIMARY,412,800,12391414,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77836,CALDWELL,TX,PRIMARY,4813,8783,156682163,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37853,ROCKFORD,TN,PRIMARY,1737,3124,54004743,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33875,SEBRING,FL,PRIMARY,4697,8331,126159569,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93927,GREENFIELD,CA,PRIMARY,6092,12788,184526450,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3833,EXETER,NH,PRIMARY,10490,18286,571105228,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97623,BONANZA,OR,PRIMARY,904,1736,20678294,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+79789,WINK,TX,PRIMARY,428,791,17749920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16843,HYDE,PA,PRIMARY,418,673,8637578,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92866,ORANGE,CA,PRIMARY,6759,11381,275673039,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25064,DUNBAR,WV,PRIMARY,4644,7497,132687650,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18966,SOUTHAMPTON,PA,PRIMARY,20813,36515,1110748379,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28052,GASTONIA,NC,PRIMARY,13289,24059,353456373,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54868,RICE LAKE,WI,PRIMARY,7526,12887,236583824,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+21084,JARRETTSVILLE,MD,PRIMARY,3804,6986,204002723,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+19952,HARRINGTON,DE,PRIMARY,4655,8258,141512863,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61454,LOMAX,IL,PRIMARY,328,605,8724588,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32059,LEE,FL,PRIMARY,984,1817,27174190,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70516,BRANCH,LA,PRIMARY,577,1094,19605048,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2839,MAPLEVILLE,RI,PRIMARY,906,1601,41396175,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+87015,EDGEWOOD,NM,PRIMARY,6008,10942,235758194,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37374,SEQUATCHIE,TN,PRIMARY,684,1297,18542038,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+58042,HARWOOD,ND,PRIMARY,625,1237,32477395,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62338,FOWLER,IL,PRIMARY,735,1341,27415548,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43344,RICHWOOD,OH,PRIMARY,2634,4746,94272492,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2812,CAROLINA,RI,PRIMARY,749,1368,37503525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29402,CHARLESTON,SC,PRIMARY,349,531,15751160,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28391,STEDMAN,NC,PRIMARY,2401,4502,84097929,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+23069,HANOVER,VA,PRIMARY,1452,2561,57841342,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46218,INDIANAPOLIS,IN,PRIMARY,12732,20903,241512037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+83110,AFTON,WY,PRIMARY,1822,3543,65872797,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70003,METAIRIE,LA,PRIMARY,18927,32396,679783342,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27046,SANDY RIDGE,NC,PRIMARY,917,1709,27985887,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72761,SILOAM SPRINGS,AR,PRIMARY,8095,15283,259389835,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+2659,SOUTH CHATHAM,MA,PRIMARY,594,985,16124247,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97208,PORTLAND,OR,PRIMARY,1160,1533,31521968,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+85626,PIRTLEVILLE,AZ,PRIMARY,419,834,8196655,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+20136,BRISTOW,VA,PRIMARY,10619,22107,845849788,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+89015,HENDERSON,NV,PRIMARY,17849,30967,735729333,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43779,SARAHSVILLE,OH,PRIMARY,481,835,13164372,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96863,M C B H KANEOHE BAY,HI,PRIMARY,2310,2556,39469202,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41601,ALLEN,KY,PRIMARY,514,957,17423017,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40390,WILMORE,KY,PRIMARY,2483,4629,72942488,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28431,CHADBOURN,NC,PRIMARY,2786,5116,70942390,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25514,FORT GAY,WV,PRIMARY,1373,2774,42518481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13732,APALACHIN,NY,PRIMARY,3966,7253,167062976,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33433,BOCA RATON,FL,PRIMARY,21240,34128,828003054,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+99009,ELK,WA,PRIMARY,1617,2991,51925836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5658,MARSHFIELD,VT,PRIMARY,697,1228,19008014,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48749,OMER,MI,PRIMARY,549,952,13287085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+34949,FORT PIERCE,FL,PRIMARY,3242,5198,90134616,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76092,SOUTHLAKE,TX,PRIMARY,11594,24319,1906589831,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5738,CUTTINGSVILLE,VT,PRIMARY,615,1058,20967960,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18351,PORTLAND,PA,PRIMARY,347,559,11051612,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55127,SAINT PAUL,MN,PRIMARY,9099,15796,673842538,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74832,CARNEY,OK,PRIMARY,378,702,10362996,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44224,STOW,OH,PRIMARY,20030,33898,875868822,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37381,SPRING CITY,TN,PRIMARY,3749,6875,104001316,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48327,WATERFORD,MI,PRIMARY,10732,18206,413035396,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36783,THOMASTON,AL,PRIMARY,348,661,9995867,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38486,WESTPOINT,TN,PRIMARY,425,811,11391714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56514,BARNESVILLE,MN,PRIMARY,1683,3111,62499955,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45841,JENERA,OH,PRIMARY,483,865,15970273,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54451,MEDFORD,WI,PRIMARY,5445,9722,176858106,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+24631,OAKWOOD,VA,PRIMARY,522,968,15688367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56589,WAUBUN,MN,PRIMARY,801,1423,19265259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+63434,BETHEL,MO,PRIMARY,262,479,5759705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+72330,DYESS,AR,PRIMARY,276,548,6954655,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40210,LOUISVILLE,KY,PRIMARY,5918,9539,108705958,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+33197,MIAMI,FL,PRIMARY,628,1028,15702441,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+88032,DONA ANA,NM,PRIMARY,983,1789,21136525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30132,DALLAS,GA,PRIMARY,13113,25321,569160215,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11563,LYNBROOK,NY,PRIMARY,11104,19016,606418035,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+91733,SOUTH EL MONTE,CA,PRIMARY,16472,31052,430750015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14559,SPENCERPORT,NY,PRIMARY,8942,15998,365362043,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95436,FORESTVILLE,CA,PRIMARY,2855,4585,107428617,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13752,DELANCEY,NY,PRIMARY,361,616,9364710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77586,SEABROOK,TX,PRIMARY,9752,17586,657995710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97054,DEER ISLAND,OR,PRIMARY,556,1020,22216491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13203,SYRACUSE,NY,PRIMARY,6280,9714,171340485,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95246,MOUNTAIN RANCH,CA,PRIMARY,832,1401,22405016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27828,FARMVILLE,NC,PRIMARY,3759,6921,114599842,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+87412,BLANCO,NM,PRIMARY,315,579,11722260,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71941,DONALDSON,AR,PRIMARY,766,1479,21580809,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55106,SAINT PAUL,MN,PRIMARY,21868,36448,635661245,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+41719,BONNYMAN,KY,PRIMARY,645,1242,18487449,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15666,MOUNT PLEASANT,PA,PRIMARY,7663,13112,237676324,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48321,AUBURN HILLS,MI,PRIMARY,287,445,9772270,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64622,BOGARD,MO,PRIMARY,277,501,7234999,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13438,REMSEN,NY,PRIMARY,1665,2899,57511036,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27965,POPLAR BRANCH,NC,PRIMARY,253,424,6252364,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65441,BOURBON,MO,PRIMARY,2297,4205,64970980,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60154,WESTCHESTER,IL,PRIMARY,8905,15100,385973046,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25039,CEDAR GROVE,WV,PRIMARY,485,890,14469525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66509,MAYETTA,KS,PRIMARY,1211,2120,34821740,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15022,CHARLEROI,PA,PRIMARY,5124,8570,150473679,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+97131,NEHALEM,OR,PRIMARY,982,1652,19921931,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+51503,COUNCIL BLUFFS,IA,PRIMARY,16612,29102,683574290,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+17235,MARION,PA,PRIMARY,349,596,10153044,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28127,NEW LONDON,NC,PRIMARY,2697,4990,102116917,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32503,PENSACOLA,FL,PRIMARY,14501,23321,471846283,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+50263,WAUKEE,IA,PRIMARY,6423,12093,412354771,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48045,HARRISON TOWNSHIP,MI,PRIMARY,12646,20828,518189512,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32189,SATSUMA,FL,PRIMARY,2102,3641,43126165,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+76119,FORT WORTH,TX,PRIMARY,15694,28696,349381901,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+62633,EASTON,IL,PRIMARY,337,597,10032671,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3220,BELMONT,NH,PRIMARY,3554,6141,129018031,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+94955,PETALUMA,CA,PRIMARY,349,564,15086176,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42633,MONTICELLO,KY,PRIMARY,7479,14080,174728326,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64744,EL DORADO SPRINGS,MO,PRIMARY,3177,5807,68789500,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5488,SWANTON,VT,PRIMARY,3644,6347,130712847,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56093,WASECA,MN,PRIMARY,5904,10276,198193116,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30467,SYLVANIA,GA,PRIMARY,5232,9444,137885942,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1254,RICHMOND,MA,PRIMARY,632,1068,39209955,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37303,ATHENS,TN,PRIMARY,9889,18135,299186543,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61752,LE ROY,IL,PRIMARY,2078,3813,85876769,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95969,PARADISE,CA,PRIMARY,11201,19114,297576409,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+14468,HILTON,NY,PRIMARY,8994,16219,349425027,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52591,SIGOURNEY,IA,PRIMARY,1382,2444,38544938,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68701,NORFOLK,NE,PRIMARY,13671,24172,482863129,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44837,GREENWICH,OH,PRIMARY,1891,3472,51515139,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+98948,TOPPENISH,WA,PRIMARY,4949,9422,121897135,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95122,SAN JOSE,CA,PRIMARY,22062,41936,701153563,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+6897,WILTON,CT,PRIMARY,8651,16501,1556090212,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52405,CEDAR RAPIDS,IA,PRIMARY,11605,20107,452457222,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+91739,RANCHO CUCAMONGA,CA,PRIMARY,13201,25587,840812511,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95113,SAN JOSE,CA,PRIMARY,781,1049,37924110,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36527,SPANISH FORT,AL,PRIMARY,4841,9416,271939844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44662,NAVARRE,OH,PRIMARY,4586,7868,139038676,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+25033,BUFFALO,WV,PRIMARY,939,1817,34788657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+71232,DELHI,LA,PRIMARY,2746,5067,80564652,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43734,DUNCAN FALLS,OH,PRIMARY,568,1038,18828277,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12985,SCHUYLER FALLS,NY,PRIMARY,529,931,16204574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28776,SKYLAND,NC,PRIMARY,535,876,17091498,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92562,MURRIETA,CA,PRIMARY,23762,45927,1191210837,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37080,JOELTON,TN,PRIMARY,3557,6307,129951131,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30427,GLENNVILLE,GA,PRIMARY,4135,7747,111936916,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90232,CULVER CITY,CA,PRIMARY,8201,13073,465313191,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+53934,FRIENDSHIP,WI,PRIMARY,2025,3431,45525291,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56222,CLARA CITY,MN,PRIMARY,967,1724,25418961,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+26519,ALBRIGHT,WV,PRIMARY,705,1311,22142722,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55388,WATERTOWN,MN,PRIMARY,2495,4556,109001030,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+66044,LAWRENCE,KS,PRIMARY,10860,16052,310085286,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29909,OKATIE,SC,PRIMARY,7881,13658,195389288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48041,MEMPHIS,MI,PRIMARY,2099,3901,83506554,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+90061,LOS ANGELES,CA,PRIMARY,10076,18477,238903309,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+74137,TULSA,OK,PRIMARY,12450,22445,1042828497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+7876,SUCCASUNNA,NJ,PRIMARY,5329,9849,354128006,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+12180,TROY,NY,PRIMARY,22753,36062,781174813,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46507,BRISTOL,IN,PRIMARY,4426,8126,185424431,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16033,EVANS CITY,PA,PRIMARY,3101,5421,120748627,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13642,GOUVERNEUR,NY,PRIMARY,3638,6605,110043459,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56352,MELROSE,MN,PRIMARY,2572,4670,79530423,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77272,HOUSTON,TX,PRIMARY,574,952,17871728,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+96753,KIHEI,HI,PRIMARY,12196,19553,425439028,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77563,HITCHCOCK,TX,PRIMARY,4006,6870,145494518,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47303,MUNCIE,IN,PRIMARY,9663,15957,246705803,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+49740,HARBOR SPRINGS,MI,PRIMARY,3741,6424,118544746,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+13073,GROTON,NY,PRIMARY,2990,5355,110391836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48809,BELDING,MI,PRIMARY,4799,8802,152588977,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27713,DURHAM,NC,PRIMARY,22527,37767,1145303534,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39452,LUCEDALE,MS,PRIMARY,9858,19596,363756494,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+77531,CLUTE,TX,PRIMARY,6417,11888,239538094,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30241,LAGRANGE,GA,PRIMARY,9927,18185,282897258,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+36536,FOLEY,AL,PRIMARY,1029,1785,27615352,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60079,WAUKEGAN,IL,PRIMARY,569,931,14368629,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44882,SYCAMORE,OH,PRIMARY,1427,2512,42929151,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93667,TOLLHOUSE,CA,PRIMARY,982,1864,38484975,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1852,LOWELL,MA,PRIMARY,15022,23950,573609990,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+16928,KNOXVILLE,PA,PRIMARY,602,1100,13995939,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+58638,HEBRON,ND,PRIMARY,461,844,11962712,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65020,CAMDENTON,MO,PRIMARY,6388,11399,165682464,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+29608,GREENVILLE,SC,PRIMARY,381,637,10473673,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+68037,LOUISVILLE,NE,PRIMARY,1065,1908,43614107,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37650,ERWIN,TN,PRIMARY,5270,9419,149030757,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44680,STRASBURG,OH,PRIMARY,1999,3492,63067186,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+52069,PRESTON,IA,PRIMARY,752,1356,23630672,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+35474,MOUNDVILLE,AL,PRIMARY,2712,5149,92262755,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+18092,ZIONSVILLE,PA,PRIMARY,1559,2807,83296501,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+40143,HARDINSBURG,KY,PRIMARY,2322,4268,68638106,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+38104,MEMPHIS,TN,PRIMARY,10387,15241,435448708,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93666,SULTANA,CA,PRIMARY,407,842,8590796,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+79521,HASKELL,TX,PRIMARY,1520,2731,36788484,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+15639,HUNKER,PA,PRIMARY,978,1723,35257131,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+44507,YOUNGSTOWN,OH,PRIMARY,2012,3357,29382008,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39120,NATCHEZ,MS,PRIMARY,11745,20822,310840632,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+31624,AXSON,GA,PRIMARY,459,908,11809567,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92372,PINON HILLS,CA,PRIMARY,1972,3747,75581667,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32245,JACKSONVILLE,FL,PRIMARY,616,954,19787441,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8901,NEW BRUNSWICK,NJ,PRIMARY,16776,28187,495595423,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+28001,ALBEMARLE,NC,PRIMARY,10376,18787,314253696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+8840,METUCHEN,NJ,PRIMARY,8479,15163,598124729,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+98513,OLYMPIA,WA,PRIMARY,14094,25697,596758047,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60042,ISLAND LAKE,IL,PRIMARY,4229,7500,196089463,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+93930,KING CITY,CA,PRIMARY,6008,12376,192372786,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+65470,FALCON,MO,PRIMARY,405,793,9531033,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+59433,DUTTON,MT,PRIMARY,302,522,7419720,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+61850,INDIANOLA,IL,PRIMARY,264,493,7541580,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+43952,STEUBENVILLE,OH,PRIMARY,7755,12725,202735860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+11786,SHOREHAM,NY,PRIMARY,2963,5684,199871422,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+45849,GROVER HILL,OH,PRIMARY,558,993,14985287,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+92075,SOLANA BEACH,CA,PRIMARY,6695,10911,481295872,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+56159,MOUNTAIN LAKE,MN,PRIMARY,1239,2247,32183747,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32562,GULF BREEZE,FL,PRIMARY,419,684,19907223,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+55975,SPRING VALLEY,MN,PRIMARY,2088,3746,73572794,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27826,FAIRFIELD,NC,PRIMARY,264,491,6260077,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48475,UBLY,MI,PRIMARY,1402,2465,36328566,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+27617,RALEIGH,NC,PRIMARY,7515,12300,484966226,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+91344,GRANADA HILLS,CA,PRIMARY,23384,41602,1102040461,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+4352,MOUNT VERNON,ME,PRIMARY,753,1307,25438393,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+5402,BURLINGTON,VT,PRIMARY,556,728,16805420,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+1506,BROOKFIELD,MA,PRIMARY,1665,2920,68732705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+30647,ILA,GA,PRIMARY,272,498,8140291,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+59323,COLSTRIP,MT,PRIMARY,1082,2035,58024076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+70125,NEW ORLEANS,LA,PRIMARY,5234,8350,178284259,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48184,WAYNE,MI,PRIMARY,8255,13869,242363273,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+32818,ORLANDO,FL,PRIMARY,20019,34498,546764528,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+47443,LYONS,IN,PRIMARY,517,942,14494654,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+73036,EL RENO,OK,PRIMARY,7193,12754,213297307,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+86043,SECOND MESA,AZ,PRIMARY,590,1121,12271709,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+54494,WISCONSIN RAPIDS,WI,PRIMARY,13663,23860,458696321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+64097,WELLINGTON,MO,PRIMARY,637,1175,21678040,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+60089,BUFFALO GROVE,IL,PRIMARY,21901,39466,1356499180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+95117,SAN JOSE,CA,PRIMARY,12721,22030,666191379,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+39202,JACKSON,MS,PRIMARY,3284,5162,103618280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+42048,HARDIN,KY,PRIMARY,950,1767,21529327,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+75845,GROVETON,TX,PRIMARY,1089,2052,30798298,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+78716,AUSTIN,TX,PRIMARY,511,811,25638980,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+37891,WHITESBURG,TN,PRIMARY,1523,2848,41515006,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+48607,SAGINAW,MI,PRIMARY,584,871,7579846,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+10602,WHITE PLAINS,NY,PRIMARY,541,826,19730717,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+24095,GOODVIEW,VA,PRIMARY,2083,3782,71992357,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+3458,PETERBOROUGH,NH,PRIMARY,3405,5705,135735376,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+22821,DAYTON,VA,PRIMARY,2618,4830,74790158,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00
+46368,PORTAGE,IN,PRIMARY,18487,32445,713943132,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +98568,OAKVILLE,WA,PRIMARY,1128,1966,32040078,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39096,LORMAN,MS,PRIMARY,860,1507,20656446,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12472,ROSENDALE,NY,PRIMARY,896,1409,30585447,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3462,SPOFFORD,NH,PRIMARY,877,1539,46464738,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98296,SNOHOMISH,WA,PRIMARY,11048,21459,736882094,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42718,CAMPBELLSVILLE,KY,PRIMARY,9728,17818,264063372,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98635,LYLE,WA,PRIMARY,733,1308,22438525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4220,BUCKFIELD,ME,PRIMARY,1400,2457,43905680,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28020,CASAR,NC,PRIMARY,1069,2024,29408223,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48464,OTTER LAKE,MI,PRIMARY,985,1821,28884205,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64735,CLINTON,MO,PRIMARY,5994,10581,167771555,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27882,SPRING HOPE,NC,PRIMARY,3256,5961,94047347,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15920,ARMAGH,PA,PRIMARY,506,872,16182812,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98377,RANDLE,WA,PRIMARY,844,1440,23690039,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6907,STAMFORD,CT,PRIMARY,4783,8014,324883757,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3812,BARTLETT,NH,PRIMARY,323,518,7199051,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52625,DONNELLSON,IA,PRIMARY,1270,2301,39773462,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54104,ATHELSTANE,WI,PRIMARY,487,842,9615288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30152,KENNESAW,GA,PRIMARY,17602,33285,1107231532,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37891,WHITESBURG,TN,PRIMARY,1523,2848,41515006,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26456,WEST UNION,WV,PRIMARY,1451,2708,38198185,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98038,MAPLE VALLEY,WA,PRIMARY,13596,26284,868946579,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14755,LITTLE VALLEY,NY,PRIMARY,1225,2136,36477187,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76548,HARKER HEIGHTS,TX,PRIMARY,10661,20541,422227386,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80962,COLORADO SPRINGS,CO,PRIMARY,483,827,21714643,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18840,SAYRE,PA,PRIMARY,5014,8644,174076971,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19903,DOVER,DE,PRIMARY,516,804,14735224,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34142,IMMOKALEE,FL,PRIMARY,5475,10418,112722244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64077,ORRICK,MO,PRIMARY,878,1617,30004694,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2125,DORCHESTER,MA,PRIMARY,14981,22994,501492850,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71660,NEW EDINBURG,AR,PRIMARY,384,724,11289280,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8052,MAPLE SHADE,NJ,PRIMARY,9877,15607,376208480,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36608,MOBILE,AL,PRIMARY,15750,27204,656498067,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98606,BRUSH PRAIRIE,WA,PRIMARY,4142,8043,211769233,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5872,WEST CHARLESTON,VT,PRIMARY,339,583,7858471,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+49287,TIPTON,MI,PRIMARY,1034,1917,40179950,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12468,PRATTSVILLE,NY,PRIMARY,513,881,14590558,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10474,BRONX,NY,PRIMARY,4369,7377,97578251,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57366,PARKSTON,SD,PRIMARY,1129,2005,31558453,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99741,GALENA,AK,PRIMARY,269,441,8949655,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21224,BALTIMORE,MD,PRIMARY,22061,33150,876200844,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55316,CHAMPLIN,MN,PRIMARY,11573,20946,614760299,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7407,ELMWOOD PARK,NJ,PRIMARY,9934,17030,405823236,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37217,NASHVILLE,TN,PRIMARY,14205,23166,409496387,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39440,LAUREL,MS,PRIMARY,7730,13723,208360016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70835,BATON ROUGE,LA,PRIMARY,370,604,10424012,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14534,PITTSFORD,NY,PRIMARY,16194,30109,1195986419,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6902,STAMFORD,CT,PRIMARY,29001,47130,1753889885,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2830,HARRISVILLE,RI,PRIMARY,3056,5420,146989186,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54412,AUBURNDALE,WI,PRIMARY,1034,1887,36383977,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29401,CHARLESTON,SC,PRIMARY,4007,5783,242908839,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43105,BALTIMORE,OH,PRIMARY,3984,7064,159708382,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7424,LITTLE FALLS,NJ,PRIMARY,12172,20128,585762325,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44444,NEWTON FALLS,OH,PRIMARY,5199,8868,150414836,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11105,ASTORIA,NY,PRIMARY,19011,28457,722693754,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2108,BOSTON,MA,PRIMARY,2348,3312,388783474,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91394,GRANADA HILLS,CA,PRIMARY,317,493,10565696,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98947,TIETON,WA,PRIMARY,1081,2111,32403858,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39341,MACON,MS,PRIMARY,3192,5664,62245351,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38337,GADSDEN,TN,PRIMARY,617,1139,17603973,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74048,NOWATA,OK,PRIMARY,2447,4533,72150270,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13605,ADAMS,NY,PRIMARY,1990,3684,66542359,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65247,EXCELLO,MO,PRIMARY,255,465,7506334,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +75960,MOSCOW,TX,PRIMARY,382,717,11714217,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41003,BERRY,KY,PRIMARY,1064,2054,32740893,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54534,HURLEY,WI,PRIMARY,1213,1996,30144904,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +55969,ROLLINGSTONE,MN,PRIMARY,526,968,18179186,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +26257,COALTON,WV,PRIMARY,312,617,10235062,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30123,CASSVILLE,GA,PRIMARY,400,728,10254856,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61024,DURAND,IL,PRIMARY,1298,2360,49407398,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52748,ELDRIDGE,IA,PRIMARY,4082,7608,199334547,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+5733,BRANDON,VT,PRIMARY,2818,4770,87545419,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65043,HOLTS SUMMIT,MO,PRIMARY,4532,8226,155701367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45387,YELLOW SPRINGS,OH,PRIMARY,2812,4576,102001851,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5866,SHEFFIELD,VT,PRIMARY,323,573,8362085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12564,PAWLING,NY,PRIMARY,3344,5933,185634365,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +51449,LAKE CITY,IA,PRIMARY,964,1711,26237453,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17002,ALLENSVILLE,PA,PRIMARY,270,513,5518469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99517,ANCHORAGE,AK,PRIMARY,8337,12984,368564168,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5068,SOUTH ROYALTON,VT,PRIMARY,1524,2458,45320281,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77320,HUNTSVILLE,TX,PRIMARY,8454,14906,254326480,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12764,NARROWSBURG,NY,PRIMARY,773,1300,22315129,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61081,STERLING,IL,PRIMARY,10835,18732,348525244,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35011,ALEXANDER CITY,AL,PRIMARY,763,1334,24855050,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33419,WEST PALM BEACH,FL,PRIMARY,567,915,14880762,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28072,GRANITE QUARRY,NC,PRIMARY,685,1182,20002156,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95825,SACRAMENTO,CA,PRIMARY,13577,20027,413966183,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32241,JACKSONVILLE,FL,PRIMARY,704,1113,25647121,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31038,HILLSBORO,GA,PRIMARY,381,697,10703049,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1730,BEDFORD,MA,PRIMARY,6588,11940,472027119,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31018,DAVISBORO,GA,PRIMARY,535,978,13778028,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6902,STAMFORD,CT,PRIMARY,29001,47130,1753889885,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5734,BRIDPORT,VT,PRIMARY,636,1087,17995407,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +77009,HOUSTON,TX,PRIMARY,16263,27460,620494819,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12809,ARGYLE,NY,PRIMARY,1685,2994,55154197,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15948,REVLOC,PA,PRIMARY,284,523,8316497,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14801,ADDISON,NY,PRIMARY,2212,4062,65839037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2135,BRIGHTON,MA,PRIMARY,21268,27370,855471186,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7110,NUTLEY,NJ,PRIMARY,14513,25001,882251300,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31054,MC INTYRE,GA,PRIMARY,750,1365,19033239,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85302,GLENDALE,AZ,PRIMARY,15451,26533,504502805,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92868,ORANGE,CA,PRIMARY,9575,16174,369443577,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +58533,ELGIN,ND,PRIMARY,463,772,8081153,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +64850,NEOSHO,MO,PRIMARY,9846,18271,270357486,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76028,BURLESON,TX,PRIMARY,25425,48315,1206427469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13839,SIDNEY CENTER,NY,PRIMARY,608,1088,18102112,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37602,JOHNSON CITY,TN,PRIMARY,586,898,23142996,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +10704,YONKERS,NY,PRIMARY,15745,25409,706145477,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +42721,CANEYVILLE,KY,PRIMARY,1403,2761,35343402,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37082,KINGSTON SPRINGS,TN,PRIMARY,2977,5512,132663068,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13320,CHERRY VALLEY,NY,PRIMARY,934,1642,24912132,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13464,SMYRNA,NY,PRIMARY,478,851,12885849,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18325,CANADENSIS,PA,PRIMARY,1188,2012,37934925,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +34145,MARCO ISLAND,FL,PRIMARY,7444,12812,367150533,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27284,KERNERSVILLE,NC,PRIMARY,22754,42019,997453411,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31625,BARNEY,GA,PRIMARY,468,857,12142370,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71237,EPPS,LA,PRIMARY,517,975,14290608,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37082,KINGSTON SPRINGS,TN,PRIMARY,2977,5512,132663068,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99655,QUINHAGAK,AK,PRIMARY,263,470,3903253,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43442,LINDSEY,OH,PRIMARY,511,883,16798678,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33133,MIAMI,FL,PRIMARY,15579,24458,1042707180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16501,ERIE,PA,PRIMARY,614,778,6263221,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29130,RIDGEWAY,SC,PRIMARY,2806,4927,93677948,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12428,ELLENVILLE,NY,PRIMARY,2601,4494,76147839,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92625,CORONA DEL MAR,CA,PRIMARY,6986,10985,697736150,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78467,CORPUS CHRISTI,TX,PRIMARY,479,799,11633418,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86003,FLAGSTAFF,AZ,PRIMARY,1417,2352,46059771,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60564,NAPERVILLE,IL,PRIMARY,17498,36024,1772224608,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95012,CASTROVILLE,CA,PRIMARY,4326,8542,126955739,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +19606,READING,PA,PRIMARY,17255,29985,723273693,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45849,GROVER HILL,OH,PRIMARY,558,993,14985287,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37939,KNOXVILLE,TN,PRIMARY,310,480,13270687,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27920,BUXTON,NC,PRIMARY,814,1379,19072639,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56545,GARY,MN,PRIMARY,352,614,7464491,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31031,GORDON,GA,PRIMARY,2649,4822,74535288,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30907,AUGUSTA,GA,PRIMARY,22313,40607,1003729909,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32341,MADISON,FL,PRIMARY,529,911,13438499,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8869,RARITAN,NJ,PRIMARY,3424,5803,175605743,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12464,PHOENICIA,NY,PRIMARY,520,816,13164275,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56044,HENDERSON,MN,PRIMARY,995,1755,37014136,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86043,SECOND MESA,AZ,PRIMARY,590,1121,12271709,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37029,BURNS,TN,PRIMARY,2424,4593,105799260,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67837,COPELAND,KS,PRIMARY,357,703,7189382,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +8051,MANTUA,NJ,PRIMARY,5474,9204,262368742,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50436,FOREST CITY,IA,PRIMARY,2723,4832,86047944,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59443,FORT SHAW,MT,PRIMARY,305,545,8399557,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70361,HOUMA,LA,PRIMARY,1036,1700,40922277,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37752,HARROGATE,TN,PRIMARY,2616,4847,77573429,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60632,CHICAGO,IL,PRIMARY,32858,63860,877276155,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +79508,BUFFALO GAP,TX,PRIMARY,609,1070,19949804,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95363,PATTERSON,CA,PRIMARY,9127,18121,354778180,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +69043,STRATTON,NE,PRIMARY,264,468,5130200,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61722,ARROWSMITH,IL,PRIMARY,267,492,8659985,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14032,CLARENCE CENTER,NY,PRIMARY,3566,6906,217101920,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41092,VERONA,KY,PRIMARY,1572,2977,65833618,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36451,GROVE HILL,AL,PRIMARY,2098,4014,64887195,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7640,HARRINGTON PARK,NJ,PRIMARY,2279,4361,194366466,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27842,HENRICO,NC,PRIMARY,656,1123,15629016,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +90807,LONG BEACH,CA,PRIMARY,15959,26420,795362567,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61755,MACKINAW,IL,PRIMARY,2116,4031,93516788,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37082,KINGSTON SPRINGS,TN,PRIMARY,2977,5512,132663068,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30436,LYONS,GA,PRIMARY,4355,8258,112095015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2631,BREWSTER,MA,PRIMARY,5179,8575,167998522,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85754,TUCSON,AZ,PRIMARY,852,1399,33845636,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +87103,ALBUQUERQUE,NM,PRIMARY,425,611,15513655,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56232,DAWSON,MN,PRIMARY,1111,1970,30294887,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +36401,EVERGREEN,AL,PRIMARY,3190,5813,78778511,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +72769,SUMMERS,AR,PRIMARY,370,707,10302981,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38663,RIPLEY,MS,PRIMARY,5106,9444,132042313,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48301,BLOOMFIELD HILLS,MI,PRIMARY,7143,12890,669214343,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +93103,SANTA BARBARA,CA,PRIMARY,8204,13621,343916881,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15146,MONROEVILLE,PA,PRIMARY,15256,25238,584154297,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32757,MOUNT DORA,FL,PRIMARY,9353,16168,318130210,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +85702,TUCSON,AZ,PRIMARY,643,959,17479478,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32091,STARKE,FL,PRIMARY,6234,11250,178806243,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29250,COLUMBIA,SC,PRIMARY,285,418,11366477,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31058,MAUK,GA,PRIMARY,607,1172,16528710,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54902,OSHKOSH,WI,PRIMARY,11496,18650,363524314,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+77878,SNOOK,TX,PRIMARY,451,813,13287452,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30144,KENNESAW,GA,PRIMARY,21815,38067,948599536,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10520,CROTON ON HUDSON,NY,PRIMARY,6109,10857,524242389,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95337,MANTECA,CA,PRIMARY,11853,22663,529597186,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4292,SUMNER,ME,PRIMARY,371,678,10918039,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97118,GARIBALDI,OR,PRIMARY,457,738,9728423,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15541,FRIEDENS,PA,PRIMARY,1780,3264,56922657,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +76518,BUCKHOLTS,TX,PRIMARY,537,993,14450698,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8270,WOODBINE,NJ,PRIMARY,3431,6102,138499266,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15955,SIDMAN,PA,PRIMARY,959,1699,30975871,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +30302,ATLANTA,GA,PRIMARY,420,564,11777761,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35172,TRAFFORD,AL,PRIMARY,1195,2333,40016483,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46635,SOUTH BEND,IN,PRIMARY,3068,5336,108187588,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45217,CINCINNATI,OH,PRIMARY,3209,5071,89786871,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +16929,LAWRENCEVILLE,PA,PRIMARY,1039,1840,28415690,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +21040,EDGEWOOD,MD,PRIMARY,11222,19332,409405527,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +65753,SPARTA,MO,PRIMARY,1970,3764,55164289,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37097,LOBELVILLE,TN,PRIMARY,867,1559,21940184,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +43004,BLACKLICK,OH,PRIMARY,11181,19234,606984386,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46068,SHARPSVILLE,IN,PRIMARY,1371,2586,52184597,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70395,SCHRIEVER,LA,PRIMARY,2213,4127,95205934,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84402,OGDEN,UT,PRIMARY,348,571,7832354,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78016,DEVINE,TX,PRIMARY,3911,7256,122646677,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17850,MONTANDON,PA,PRIMARY,364,608,9183139,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29162,TURBEVILLE,SC,PRIMARY,1048,1982,28902485,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52544,CENTERVILLE,IA,PRIMARY,3476,6109,91864298,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54628,FERRYVILLE,WI,PRIMARY,550,957,12863442,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80935,COLORADO SPRINGS,CO,PRIMARY,800,1434,23216043,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53140,KENOSHA,WI,PRIMARY,12669,21042,374038255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13438,REMSEN,NY,PRIMARY,1665,2899,57511036,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53926,DALTON,WI,PRIMARY,599,1107,15104474,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57720,BUFFALO,SD,PRIMARY,338,591,8671219,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99678,TOGIAK,AK,PRIMARY,296,512,4084359,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +70653,MERRYVILLE,LA,PRIMARY,1199,2288,43277924,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +31089,TENNILLE,GA,PRIMARY,1908,3535,51591757,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +2130,JAMAICA PLAIN,MA,PRIMARY,19207,27060,930225860,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 
+28371,PARKTON,NC,PRIMARY,2572,4854,74419550,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +50157,MALCOM,IA,PRIMARY,369,675,10747101,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +40011,CAMPBELLSBURG,KY,PRIMARY,1084,1999,34946469,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44092,WICKLIFFE,OH,PRIMARY,9141,14390,298733108,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +97741,MADRAS,OR,PRIMARY,4516,8373,125936176,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84032,HEBER CITY,UT,PRIMARY,6696,13213,266389121,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60076,SKOKIE,IL,PRIMARY,16726,29619,714881611,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +5769,SALISBURY,VT,PRIMARY,548,951,16611396,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13690,STAR LAKE,NY,PRIMARY,403,726,11668199,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78164,YORKTOWN,TX,PRIMARY,1682,3046,50870658,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +33924,CAPTIVA,FL,PRIMARY,288,426,18292113,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +4932,DIXMONT,ME,PRIMARY,556,994,19170243,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37388,TULLAHOMA,TN,PRIMARY,11254,20304,368318377,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54729,CHIPPEWA FALLS,WI,PRIMARY,15301,26758,531748675,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29804,AIKEN,SC,PRIMARY,332,560,13380367,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +1041,HOLYOKE,MA,PRIMARY,606,997,14650215,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +41063,MORNING VIEW,KY,PRIMARY,1400,2575,50252821,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20634,GREAT MILLS,MD,PRIMARY,3347,5941,173165833,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73160,OKLAHOMA CITY,OK,PRIMARY,23174,41857,855349080,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +28532,HAVELOCK,NC,PRIMARY,9805,18917,306741464,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +3777,ORFORD,NH,PRIMARY,626,1064,24973577,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60954,MOMENCE,IL,PRIMARY,3086,5430,94239294,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27603,RALEIGH,NC,PRIMARY,19277,33789,836671525,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95045,SAN JUAN BAUTISTA,CA,PRIMARY,1829,3345,85440444,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62869,NORRIS CITY,IL,PRIMARY,1225,2312,40131325,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86015,BELLEMONT,AZ,PRIMARY,352,688,18796038,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44611,BIG PRAIRIE,OH,PRIMARY,860,1510,24298782,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39176,VAIDEN,MS,PRIMARY,820,1530,21377104,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +67735,GOODLAND,KS,PRIMARY,2618,4491,67588640,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +7438,OAK RIDGE,NJ,PRIMARY,5566,10328,344235177,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +12787,WHITE SULPHUR SPRINGS,NY,PRIMARY,284,493,9763246,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13123,NORTH BAY,NY,PRIMARY,258,451,5933714,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +47141,MARYSVILLE,IN,PRIMARY,701,1328,25990664,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +95991,YUBA CITY,CA,PRIMARY,14875,27881,477345135,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56449,FORT RIPLEY,MN,PRIMARY,728,1375,25150046,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +14437,DANSVILLE,NY,PRIMARY,4419,7679,139677842,2017-01-01 
12:00:00+00:00,2017-01-01 12:00:00+00:00 +53015,CLEVELAND,WI,PRIMARY,1299,2344,50719141,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +15665,MANOR,PA,PRIMARY,817,1403,28953012,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32202,JACKSONVILLE,FL,PRIMARY,1536,2088,54046102,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +44460,SALEM,OH,PRIMARY,12335,21189,378339845,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +18087,TREXLERTOWN,PA,PRIMARY,481,765,16246858,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52722,BETTENDORF,IA,PRIMARY,17207,30958,948502308,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +84110,SALT LAKE CITY,UT,PRIMARY,709,1023,22241202,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +98550,HOQUIAM,WA,PRIMARY,4611,8013,137902954,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +59865,SAINT IGNATIUS,MT,PRIMARY,1271,2321,29946515,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +74159,TULSA,OK,PRIMARY,312,457,12805173,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +37416,CHATTANOOGA,TN,PRIMARY,7022,11829,220714255,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +25168,RED HOUSE,WV,PRIMARY,1136,2170,39931250,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +45238,CINCINNATI,OH,PRIMARY,23159,38407,772285806,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35143,SHELBY,AL,PRIMARY,1234,2292,40345152,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27371,TROY,NC,PRIMARY,3033,5604,88266631,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +57004,BERESFORD,SD,PRIMARY,1641,3001,53161015,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +86340,SEDONA,AZ,PRIMARY,538,840,12913321,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22405,FREDERICKSBURG,VA,PRIMARY,12910,24120,703269692,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +29729,RICHBURG,SC,PRIMARY,1019,1893,31572323,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +52632,KEOKUK,IA,PRIMARY,5782,10097,163052856,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10505,BALDWIN PLACE,NY,PRIMARY,563,1113,45340262,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8558,SKILLMAN,NJ,PRIMARY,3140,6048,468109768,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +73017,CEMENT,OK,PRIMARY,675,1294,19274076,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +80016,AURORA,CO,PRIMARY,17090,33336,1273315330,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +53051,MENOMONEE FALLS,WI,PRIMARY,18327,32430,952639265,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10950,MONROE,NY,PRIMARY,15249,31099,794108919,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62272,PERCY,IL,PRIMARY,692,1258,18053957,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +60022,GLENCOE,IL,PRIMARY,4281,7975,780571641,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32081,PONTE VEDRA,FL,PRIMARY,1587,3200,102959037,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +78384,SAN DIEGO,TX,PRIMARY,2344,4355,66674520,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +89041,PAHRUMP,NV,PRIMARY,2780,4623,71850383,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +10471,BRONX,NY,PRIMARY,9876,15606,623827705,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99587,GIRDWOOD,AK,PRIMARY,1306,1911,51471481,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +27205,ASHEBORO,NC,PRIMARY,13470,25331,464012531,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +61260,JOY,IL,PRIMARY,400,712,11099004,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +92261,PALM 
DESERT,CA,PRIMARY,1163,1814,30976723,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +82933,FORT BRIDGER,WY,PRIMARY,376,706,16480206,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +22150,SPRINGFIELD,VA,PRIMARY,13628,22990,565927238,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +71929,BISMARCK,AR,PRIMARY,1820,3420,52794110,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +8060,MOUNT HOLLY,NJ,PRIMARY,12422,21570,576915574,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +48202,DETROIT,MI,PRIMARY,5928,8794,130581093,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +35243,BIRMINGHAM,AL,PRIMARY,9194,15569,637548845,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +91711,CLAREMONT,CA,PRIMARY,15562,27155,842885054,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +62883,SCHELLER,IL,PRIMARY,294,551,9711667,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +20749,FORT WASHINGTON,MD,PRIMARY,304,487,15665381,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +32811,ORLANDO,FL,PRIMARY,16556,26038,375095638,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +39404,HATTIESBURG,MS,PRIMARY,1015,1685,32266631,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +54441,HEWITT,WI,PRIMARY,397,767,19498575,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +6811,DANBURY,CT,PRIMARY,14312,24956,817156060,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +38310,ADAMSVILLE,TN,PRIMARY,2385,4536,64717478,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +23231,HENRICO,VA,PRIMARY,16492,28091,606831085,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +17240,NEWBURG,PA,PRIMARY,1543,2880,47589961,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +99218,SPOKANE,WA,PRIMARY,5973,10594,210817301,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +11206,BROOKLYN,NY,PRIMARY,30896,50200,755431690,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +46989,UPLAND,IN,PRIMARY,1685,3128,56729502,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +13027,BALDWINSVILLE,NY,PRIMARY,15658,27650,728067119,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +56501,DETROIT LAKES,MN,PRIMARY,7471,12936,236814606,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 +24053,ARARAT,VA,PRIMARY,981,1837,23500829,2017-01-01 12:00:00+00:00,2017-01-01 12:00:00+00:00 diff --git a/ui/feature_repo/feature_store.yaml b/ui/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..31b27e2385 --- /dev/null +++ b/ui/feature_repo/feature_store.yaml @@ -0,0 +1,10 @@ +registry: registry.db +project: credit_scoring_aws +provider: local +online_store: + type: sqlite +offline_store: + type: file +flags: + alpha_features: true + on_demand_transforms: true diff --git a/ui/feature_repo/features.py b/ui/feature_repo/features.py new file mode 100644 index 0000000000..0f74b424e7 --- /dev/null +++ b/ui/feature_repo/features.py @@ -0,0 +1,205 @@ +from datetime import timedelta + +from feast import ( + Bool, + Entity, + FeatureService, + FeatureView, + Field, + FileSource, + Int64, + String, + ValueType, +) +from feast.data_source import RequestSource +from feast.request_feature_view import RequestFeatureView +from feast.on_demand_feature_view import on_demand_feature_view +from feast.field import Field +import pandas as pd + +zipcode = Entity( + name="zipcode", + value_type=ValueType.INT64, + description="A zipcode", + labels={"owner": "danny@tecton.ai", "team": "hack week",}, +) + +zipcode_source = FileSource( + name="zipcode", + path="data/zipcode_table.parquet", + timestamp_field="event_timestamp", + 
created_timestamp_column="created_timestamp",
+)
+
+zipcode_features = FeatureView(
+    name="zipcode_features",
+    entities=["zipcode"],
+    ttl=timedelta(days=3650),
+    schema=[
+        Field(name="city", dtype=String),
+        Field(name="state", dtype=String),
+        Field(name="location_type", dtype=String),
+        Field(name="tax_returns_filed", dtype=Int64),
+        Field(name="population", dtype=Int64),
+        Field(name="total_wages", dtype=Int64),
+    ],
+    batch_source=zipcode_source,
+    tags={
+        "date_added": "2022-02-7",
+        "experiments": "experiment-A,experiment-B,experiment-C",
+        "access_group": "feast-team@tecton.ai",
+    },
+    online=True,
+)
+
+zipcode_money_features = FeatureView(
+    name="zipcode_money_features",
+    entities=["zipcode"],
+    ttl=timedelta(days=3650),
+    schema=[
+        Field(name="tax_returns_filed", dtype=Int64),
+        Field(name="total_wages", dtype=Int64),
+    ],
+    batch_source=zipcode_source,
+    tags={
+        "date_added": "2022-02-7",
+        "experiments": "experiment-A,experiment-B,experiment-C",
+        "access_group": "feast-team@tecton.ai",
+    },
+    online=True,
+)
+
+dob_ssn = Entity(
+    name="dob_ssn",
+    value_type=ValueType.STRING,
+    description="Date of birth and last four digits of social security number",
+    labels={"owner": "tony@tecton.ai", "team": "hack week",},
+)
+
+credit_history_source = FileSource(
+    name="credit_history",
+    path="data/credit_history.parquet",
+    timestamp_field="event_timestamp",
+    created_timestamp_column="created_timestamp",
+)
+
+credit_history = FeatureView(
+    name="credit_history",
+    entities=["dob_ssn"],
+    ttl=timedelta(days=9000),
+    schema=[
+        Field(name="credit_card_due", dtype=Int64),
+        Field(name="mortgage_due", dtype=Int64),
+        Field(name="student_loan_due", dtype=Int64),
+        Field(name="vehicle_loan_due", dtype=Int64),
+        Field(name="hard_pulls", dtype=Int64),
+        Field(name="missed_payments_2y", dtype=Int64),
+        Field(name="missed_payments_1y", dtype=Int64),
+        Field(name="missed_payments_6m", dtype=Int64),
+        Field(name="bankruptcies", dtype=Int64),
+    ],
+    batch_source=credit_history_source,
+    tags={
+        "date_added": "2022-02-6",
+        "experiments": "experiment-A",
+        "access_group": "feast-team@tecton.ai",
+    },
+    online=True,
+)
+
+# Define a request data source which encodes features / information only
+# available at request time (e.g.
part of the user-initiated HTTP request)
+input_request = RequestSource(
+    name="transaction",
+    schema=[
+        Field(name="transaction_amt", dtype=Int64),
+    ],
+)
+
+# Define an on-demand feature view which can generate new features based on
+# existing feature views and RequestSource features
+@on_demand_feature_view(
+    sources={"credit_history": credit_history, "transaction": input_request,},
+    schema=[
+        Field(name="transaction_gt_last_credit_card_due", dtype=Bool),
+    ],
+)
+def transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame:
+    df = pd.DataFrame()
+    df["transaction_gt_last_credit_card_due"] = (
+        inputs["transaction_amt"] > inputs["credit_card_due"]
+    )
+    return df
+
+
+# Define a request feature view
+transaction_request_fv = RequestFeatureView(
+    name="transaction_request_fv", request_data_source=input_request,
+)
+
+model_v1 = FeatureService(
+    name="credit_score_v1",
+    features=[
+        credit_history[["mortgage_due", "credit_card_due", "missed_payments_1y"]],
+        zipcode_features,
+    ],
+    tags={"owner": "tony@tecton.ai", "stage": "staging"},
+    description="Credit scoring model",
+)
+
+model_v2 = FeatureService(
+    name="credit_score_v2",
+    features=[
+        credit_history[["mortgage_due", "credit_card_due", "missed_payments_1y"]],
+        zipcode_features,
+        transaction_request_fv,
+    ],
+    tags={"owner": "tony@tecton.ai", "stage": "prod"},
+    description="Credit scoring model",
+)
+
+model_v3 = FeatureService(
+    name="credit_score_v3",
+    features=[
+        credit_history[["mortgage_due", "credit_card_due", "missed_payments_1y"]],
+        zipcode_features,
+        transaction_gt_last_credit_card_due,
+    ],
+    tags={"owner": "tony@tecton.ai", "stage": "dev"},
+    description="Credit scoring model",
+)
+
+zipcode_model = FeatureService(
+    name="zipcode_model",
+    features=[zipcode_features,],
+    tags={"owner": "amanda@tecton.ai", "stage": "dev"},
+    description="Location model",
+)
+
+zipcode_model_v2 = FeatureService(
+    name="zipcode_model_v2",
+    features=[zipcode_money_features,],
+    tags={"owner": "amanda@tecton.ai", "stage": "dev"},
+    description="Location model",
+)
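
Note: materialize.sh below assumes these definitions have already been registered in the registry (normally via `feast apply` run inside ui/feature_repo). A minimal sketch of the same step from Python, not part of this change; it assumes FeatureStore.apply accepts a list of objects (true for Feast releases in this range, but worth verifying against your version):

from feast import FeatureStore

# Import the definitions declared in features.py above (assumes this script
# lives inside ui/feature_repo, next to feature_store.yaml).
from features import (
    credit_history,
    dob_ssn,
    model_v1,
    model_v2,
    model_v3,
    transaction_gt_last_credit_card_due,
    transaction_request_fv,
    zipcode,
    zipcode_features,
    zipcode_model,
    zipcode_model_v2,
    zipcode_money_features,
)

# Register everything in the local registry.db configured in feature_store.yaml;
# this is the programmatic equivalent of `feast apply`.
store = FeatureStore(repo_path=".")
store.apply(
    [
        zipcode,
        dob_ssn,
        zipcode_features,
        zipcode_money_features,
        credit_history,
        transaction_gt_last_credit_card_due,
        transaction_request_fv,
        model_v1,
        model_v2,
        model_v3,
        zipcode_model,
        zipcode_model_v2,
    ]
)
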
diff --git a/ui/feature_repo/materialize.sh b/ui/feature_repo/materialize.sh
new file mode 100644
index 0000000000..5702895cba
--- /dev/null
+++ b/ui/feature_repo/materialize.sh
@@ -0,0 +1,4 @@
+THREE_YEARS_AGO=$(date -v-3y -u +"%Y-%m-%dT%H:%M:%S")  # BSD/macOS date syntax
+feast materialize-incremental $THREE_YEARS_AGO
+CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S")
+feast materialize-incremental $CURRENT_TIME
\ No newline at end of file
diff --git a/ui/feature_repo/my_training_ds.parquet b/ui/feature_repo/my_training_ds.parquet
new file mode 100644
index 0000000000..614bb424d9
Binary files /dev/null and b/ui/feature_repo/my_training_ds.parquet differ
diff --git a/ui/feature_repo/test_get_features.py b/ui/feature_repo/test_get_features.py
new file mode 100644
index 0000000000..42af53e94b
--- /dev/null
+++ b/ui/feature_repo/test_get_features.py
@@ -0,0 +1,87 @@
+import pandas as pd
+from feast import FeatureStore
+from feast.infra.offline_stores.file import SavedDatasetFileStorage
+
+from feast.dqm.profilers.ge_profiler import ge_profiler
+
+from great_expectations.core.expectation_suite import ExpectationSuite
+from great_expectations.dataset import PandasDataset
+
+DELTA = 0.1  # allowed deviation window, as a fraction of the value, on a [0, 1] scale
+# Note: the GE integration allows asserting differences between datasets. The "ds" below is the reference dataset; profiling it generates the expectation suite, which can then be checked against future datasets.
+# It's used via ge.validate(new_dataset, ExpectationSuite).
+# For this demo, though, we only generate the suite and inspect it (see test_ge below).
+
+
+@ge_profiler
+def credit_profiler(ds: PandasDataset) -> ExpectationSuite:
+    # simple checks on data consistency
+    ds.expect_column_values_to_be_between(
+        "credit_card_due", min_value=0, mostly=0.99,  # allow some outliers
+    )
+
+    ds.expect_column_values_to_be_between(
+        "missed_payments_1y",
+        min_value=0,
+        max_value=5,
+        mostly=0.99,  # allow some outliers
+    )
+
+    return ds.get_expectation_suite()
+
+
+def generate_saved_dataset():
+    store = FeatureStore(repo_path=".")
+    entity_df = pd.read_parquet(path="data/loan_table.parquet")
+
+    fs = store.get_feature_service("credit_score_v1")
+    job = store.get_historical_features(entity_df=entity_df, features=fs,)
+    store.create_saved_dataset(
+        from_=job,
+        name="my_training_ds",
+        storage=SavedDatasetFileStorage(path="my_training_ds.parquet"),
+        feature_service=fs,
+        profiler=credit_profiler,
+    )
+
+
+def get_latest_timestamps():
+    store = FeatureStore(repo_path=".")
+    feature_views = store.list_feature_views()
+    for fv in feature_views:
+        print(
+            f"Data source latest event for {fv.name} is {fv.batch_source._meta.latest_event_timestamp}"
+        )
+
+
+def test_ge():
+    store = FeatureStore(repo_path=".")
+
+    print("--- Historical features (from saved dataset) ---")
+    ds = store.get_saved_dataset("my_training_ds")
+    print(ds._profile)
+
+
+def run_demo():
+    store = FeatureStore(repo_path=".")
+
+    print("--- Historical features (from saved dataset) ---")
+    ds = store.get_saved_dataset("my_training_ds")
+    print(ds.to_df())
+
+    print("\n--- Online features ---")
+    features = store.get_online_features(
+        features=store.get_feature_service("credit_score_v3"),
+        entity_rows=[
+            {"zipcode": 30721, "dob_ssn": "19530219_5179", "transaction_amt": 1023}
+        ],
+    ).to_dict()
+    for key, value in sorted(features.items()):
+        print(key, " : ", value)
+
+
+if __name__ == "__main__":
+    generate_saved_dataset()
+    get_latest_timestamps()
+    # test_ge()
+    run_demo()
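
The note in test_get_features.py mentions validating future data against the generated suite but leaves that step out. A hedged sketch of what that flow could look like, placed alongside test_get_features.py so credit_profiler is importable; SavedDataset.as_reference and ValidationFailed follow the Feast data-quality tutorial of this era, and exact signatures (some versions require a name argument to as_reference) should be verified against your Feast version:

import pandas as pd

from feast import FeatureStore
from feast.dqm.errors import ValidationFailed

from test_get_features import credit_profiler

store = FeatureStore(repo_path=".")

# Turn the saved training dataset into a validation reference.
reference = store.get_saved_dataset("my_training_ds").as_reference(
    profiler=credit_profiler
)

# Retrieve a fresh dataset and validate it against the reference;
# to_df raises ValidationFailed when the expectations are not met.
job = store.get_historical_features(
    entity_df=pd.read_parquet("data/loan_table.parquet"),
    features=store.get_feature_service("credit_score_v1"),
)
try:
    df = job.to_df(validation_reference=reference)
    print("validation passed")
except ValidationFailed as exc:
    print(exc.validation_report)
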
"build:lib-dev": "rimraf ./dist && tsc && rollup -c && yalc publish -f", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "jest": { + "moduleNameMapper": { + "d3": "/node_modules/d3/dist/d3.min.js" + } + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + }, + "devDependencies": { + "@babel/core": "^7.17.5", + "@babel/preset-env": "^7.16.11", + "@babel/preset-react": "^7.16.7", + "@rollup/plugin-babel": "^5.3.1", + "@rollup/plugin-commonjs": "^21.0.2", + "@rollup/plugin-json": "^4.1.0", + "@rollup/plugin-node-resolve": "^13.1.3", + "@rollup/plugin-typescript": "^8.3.1", + "@testing-library/jest-dom": "^5.14.1", + "@testing-library/react": "^12.0.0", + "@testing-library/user-event": "^13.2.1", + "msw": "^0.36.8", + "react": "^17.0.2", + "react-dom": "^17.0.2", + "rimraf": "^3.0.2", + "rollup": "^2.68.0", + "rollup-plugin-copy": "^3.4.0", + "rollup-plugin-import-css": "^3.0.2", + "rollup-plugin-svg": "^2.0.0", + "rollup-plugin-svgo": "^1.1.0", + "rollup-plugin-terser": "^7.0.2", + "tslib": "^2.3.1", + "typescript": "^4.4.2" + }, + "description": "Web UI for the [Feast Feature Store](https://feast.dev/)", + "repository": { + "type": "git", + "url": "git+https://github.com/feast-dev/feast.git" + }, + "keywords": [ + "Feast", + "Feature", + "Store" + ], + "author": "tony@tecton.ai", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/feast-dev/feast/issues" + } +} diff --git a/ui/public/data.json b/ui/public/data.json new file mode 100644 index 0000000000..3fd1630022 --- /dev/null +++ b/ui/public/data.json @@ -0,0 +1,6 @@ +[ + { "name": "basketball", "league": "NBA" }, + { "name": "hockey", "league": "NHL" }, + { "name": "football", "league": "UEFA" }, + { "name": "baseball", "league": "MLB" } +] diff --git a/ui/public/demo-custom-tabs/demo.json b/ui/public/demo-custom-tabs/demo.json new file mode 100644 index 0000000000..5b0162a496 --- /dev/null +++ b/ui/public/demo-custom-tabs/demo.json @@ -0,0 +1,4 @@ +{ + "hello": "world", + "name": "Feast" +} diff --git a/ui/public/empty_registry.json b/ui/public/empty_registry.json new file mode 100644 index 0000000000..2ac5ef3eab --- /dev/null +++ b/ui/public/empty_registry.json @@ -0,0 +1,3 @@ +{ + "project": "empty_registry" +} diff --git a/ui/public/favicon.ico b/ui/public/favicon.ico new file mode 100644 index 0000000000..a11777cc47 Binary files /dev/null and b/ui/public/favicon.ico differ diff --git a/ui/public/feast-favicon-300x300.png b/ui/public/feast-favicon-300x300.png new file mode 100644 index 0000000000..e9f43491f6 Binary files /dev/null and b/ui/public/feast-favicon-300x300.png differ diff --git a/ui/public/feast-favicon-36x36.png b/ui/public/feast-favicon-36x36.png new file mode 100644 index 0000000000..ed39c60c1b Binary files /dev/null and b/ui/public/feast-favicon-36x36.png differ diff --git a/ui/public/index.html b/ui/public/index.html new file mode 100644 index 0000000000..59d14d1959 --- /dev/null +++ b/ui/public/index.html @@ -0,0 +1,46 @@ + + + + + + + + + + + + + Feast Feature Store + + + +

diff --git a/ui/public/logo192.png b/ui/public/logo192.png
new file mode 100644
index 0000000000..fc44b0a379
Binary files /dev/null and b/ui/public/logo192.png differ
diff --git a/ui/public/logo512.png b/ui/public/logo512.png
new file mode 100644
index 0000000000..a4e47a6545
Binary files /dev/null and b/ui/public/logo512.png differ
diff --git a/ui/public/manifest.json b/ui/public/manifest.json
new file mode 100644
index 0000000000..eaeed00748
--- /dev/null
+++ b/ui/public/manifest.json
@@ -0,0 +1,25 @@
+{
+  "short_name": "Feast UI",
+  "name": "Feast UI",
+  "icons": [
+    {
+      "src": "favicon.ico",
+      "sizes": "64x64 32x32 24x24 16x16",
+      "type": "image/x-icon"
+    },
+    {
+      "src": "logo192.png",
+      "type": "image/png",
+      "sizes": "192x192"
+    },
+    {
+      "src": "logo512.png",
+      "type": "image/png",
+      "sizes": "512x512"
+    }
+  ],
+  "start_url": ".",
+  "display": "standalone",
+  "theme_color": "#000000",
+  "background_color": "#ffffff"
+}
diff --git a/ui/public/metadata/credit_score_project/featureView/credit_history.json b/ui/public/metadata/credit_score_project/featureView/credit_history.json
new file mode 100644
index 0000000000..7e88efd80a
--- /dev/null
+++ b/ui/public/metadata/credit_score_project/featureView/credit_history.json
@@ -0,0 +1,185 @@
+{
+  "columnsSummaryStatistics": {
+    "credit_card_due": {
+      "name": "credit_card_due",
+      "valueType": "INT64",
+      "sampleValues": [1, 2, 3],
+      "histogram": [
+        { "x0": 0, "x1": 0.1, "count": 35 },
+        { "x0": 0.1, "x1": 0.2, "count": 8 },
+        { "x0": 0.2, "x1": 0.3, "count": 5 },
+        { "x0": 0.3, "x1": 0.4, "count": 3 },
+        { "x0": 0.4, "x1": 0.5, "count": 2 },
+        { "x0": 0.5, "x1": 0.6, "count": 1 },
+        { "x0": 0.6, "x1": 0.7, "count": 0 },
+        { "x0": 0.7, "x1": 0.8, "count": 1 },
+        { "x0": 0.8, "x1": 0.9, "count": 2 },
+        { "x0": 0.9, "x1": 1, "count": 1 }
+      ],
+      "proportionOfZeros": 0.01,
+      "proportionMissing": 0.01,
+      "min": 0,
+      "max": 12
+    },
+    "mortgage_due": {
+      "name": "mortgage_due",
+      "valueType": "INT64",
+      "sampleValues": [4, 6, 8],
+      "histogram": [
+        { "x0": 0, "x1": 0.1, "count": 35 },
+        { "x0": 0.1, "x1": 0.2, "count": 8 },
+        { "x0": 0.2, "x1": 0.3, "count": 5 },
+        { "x0": 0.3, "x1": 0.4, "count": 3 },
+        { "x0": 0.4, "x1": 0.5, "count": 2 },
+        { "x0": 0.5, "x1": 0.6, "count": 1 },
+        { "x0": 0.6, "x1": 0.7, "count": 0 },
+        { "x0": 0.7, "x1": 0.8, "count": 1 },
+        { "x0": 0.8, "x1": 0.9, "count": 2 },
+        { "x0": 0.9, "x1": 1, "count": 1 }
+      ],
+      "proportionOfZeros": 0.01,
+      "proportionMissing": 0.01,
+      "min": 0,
+      "max": 12
+    },
+    "student_loan_due": {
+      "name": "student_loan_due",
+      "valueType": "INT64",
+      "sampleValues": [0, 1, 3],
+      "histogram": [
+        { "x0": 0, "x1": 0.1, "count": 35 },
+        { "x0": 0.1, "x1": 0.2, "count": 8 },
+        { "x0": 0.2, "x1": 0.3, "count": 5 },
+        { "x0": 0.3, "x1": 0.4, "count": 3 },
+        { "x0": 0.4, "x1": 0.5, "count": 2 },
+        { "x0": 0.5, "x1": 0.6, "count": 1 },
+        { "x0": 0.6, "x1": 0.7, "count": 0 },
+        { "x0": 0.7, "x1": 0.8, "count": 1 },
+        { "x0": 0.8, "x1": 0.9, "count": 2 },
+        { "x0": 0.9, "x1": 1, "count": 1 }
+      ],
+      "proportionOfZeros": 0.01,
+      "proportionMissing": 0.01,
+      "min": 0,
+      "max": 12
+    },
+    "vehicle_loan_due": {
+      "name": "vehicle_loan_due",
+      "valueType": "INT64",
+      "sampleValues": [2, 1, 0],
+      "histogram": [
+        { "x0": 0, "x1": 0.1, "count": 35 },
+        { "x0": 0.1, "x1": 0.2, "count": 8 },
+        { "x0": 0.2, "x1": 0.3, "count": 5 },
+        { "x0": 0.3, "x1": 0.4, "count": 3 },
+        { "x0": 0.4, "x1": 0.5, "count": 2 },
+        { "x0": 0.5, "x1": 0.6, "count": 1 },
+        { "x0": 0.6, "x1": 0.7, "count": 0 },
+        { "x0": 0.7, "x1": 0.8,
"count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ] + }, + "hard_pulls": { + "name": "hard_pulls", + "valueType": "INT64", + "sampleValues": [0, 9, 12], + "histogram": [ + { "x0": 0, "x1": 0.1, "count": 35 }, + { "x0": 0.1, "x1": 0.2, "count": 8 }, + { "x0": 0.2, "x1": 0.3, "count": 5 }, + { "x0": 0.3, "x1": 0.4, "count": 3 }, + { "x0": 0.4, "x1": 0.5, "count": 2 }, + { "x0": 0.5, "x1": 0.6, "count": 1 }, + { "x0": 0.6, "x1": 0.7, "count": 0 }, + { "x0": 0.7, "x1": 0.8, "count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ], + "proportionOfZeros": 0.01, + "proportionMissing": 0.01, + "min": 0, + "max": 12 + }, + "missed_payments_2y": { + "name": "missed_payments_2y", + "valueType": "INT64", + "sampleValues": [0, 1, 3], + "histogram": [ + { "x0": 0, "x1": 0.1, "count": 35 }, + { "x0": 0.1, "x1": 0.2, "count": 8 }, + { "x0": 0.2, "x1": 0.3, "count": 5 }, + { "x0": 0.3, "x1": 0.4, "count": 3 }, + { "x0": 0.4, "x1": 0.5, "count": 2 }, + { "x0": 0.5, "x1": 0.6, "count": 1 }, + { "x0": 0.6, "x1": 0.7, "count": 0 }, + { "x0": 0.7, "x1": 0.8, "count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ], + "proportionOfZeros": 0.01, + "proportionMissing": 0.01, + "min": 0, + "max": 12 + }, + "missed_payments_1y": { + "name": "missed_payments_1y", + "valueType": "INT64", + "sampleValues": [0, 1, 3], + "histogram": [ + { "x0": 0, "x1": 0.1, "count": 35 }, + { "x0": 0.1, "x1": 0.2, "count": 8 }, + { "x0": 0.2, "x1": 0.3, "count": 5 }, + { "x0": 0.3, "x1": 0.4, "count": 3 }, + { "x0": 0.4, "x1": 0.5, "count": 2 }, + { "x0": 0.5, "x1": 0.6, "count": 1 }, + { "x0": 0.6, "x1": 0.7, "count": 0 }, + { "x0": 0.7, "x1": 0.8, "count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ], + "proportionOfZeros": 0.01, + "proportionMissing": 0.01, + "min": 0, + "max": 12 + }, + "missed_payments_6m": { + "name": "missed_payments_6m", + "valueType": "INT64", + "sampleValues": [0, 1, 3], + "histogram": [ + { "x0": 0, "x1": 0.1, "count": 35 }, + { "x0": 0.1, "x1": 0.2, "count": 8 }, + { "x0": 0.2, "x1": 0.3, "count": 5 }, + { "x0": 0.3, "x1": 0.4, "count": 3 }, + { "x0": 0.4, "x1": 0.5, "count": 2 }, + { "x0": 0.5, "x1": 0.6, "count": 1 }, + { "x0": 0.6, "x1": 0.7, "count": 0 }, + { "x0": 0.7, "x1": 0.8, "count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ] + }, + "bankruptcies": { + "name": "bankruptcies", + "valueType": "INT64", + "sampleValues": [0, 1, 3], + "histogram": [ + { "x0": 0, "x1": 0.1, "count": 35 }, + { "x0": 0.1, "x1": 0.2, "count": 8 }, + { "x0": 0.2, "x1": 0.3, "count": 5 }, + { "x0": 0.3, "x1": 0.4, "count": 3 }, + { "x0": 0.4, "x1": 0.5, "count": 2 }, + { "x0": 0.5, "x1": 0.6, "count": 1 }, + { "x0": 0.6, "x1": 0.7, "count": 0 }, + { "x0": 0.7, "x1": 0.8, "count": 1 }, + { "x0": 0.8, "x1": 0.9, "count": 2 }, + { "x0": 0.9, "x1": 1, "count": 1 } + ], + "proportionOfZeros": 0.01, + "proportionMissing": 0.01, + "min": 0, + "max": 12 + } + } +} diff --git a/ui/public/projects-list.json b/ui/public/projects-list.json new file mode 100644 index 0000000000..d3d7c3b7d9 --- /dev/null +++ b/ui/public/projects-list.json @@ -0,0 +1,16 @@ +{ + "projects": [ + { + "name": "Credit Score Project", + "description": "Project for credit scoring team and associated models.", + "id": "credit_score_project", + "registryPath": "/registry.json" + }, + { + "name": "Empty Registry", + "description": "Testing how things look when the 
registry is empty", + "id": "empty_registry", + "registryPath": "/empty_registry.json" + } + ] +} diff --git a/ui/public/registry.json b/ui/public/registry.json new file mode 100644 index 0000000000..af328979e9 --- /dev/null +++ b/ui/public/registry.json @@ -0,0 +1,789 @@ +{ + "project": "credit_scoring_aws", + "dataSources": [ + { + "type": "BATCH_FILE", + "eventTimestampColumn": "event_timestamp", + "createdTimestampColumn": "created_timestamp", + "fileOptions": { + "fileUrl": "data/credit_history.parquet" + }, + "name": "credit_history", + "meta": { + "latestEventTimestamp": "2021-08-29T22:01:04.746575Z", + "earliestEventTimestamp": "2020-04-26T22:01:04.746575Z" + } + }, + { + "type": "REQUEST_SOURCE", + "requestDataOptions": { + "schema": { + "transaction_amt": "INT64" + } + }, + "name": "transaction" + }, + { + "type": "BATCH_FILE", + "eventTimestampColumn": "event_timestamp", + "createdTimestampColumn": "created_timestamp", + "fileOptions": { + "fileUrl": "data/zipcode_table.parquet" + }, + "name": "zipcode", + "meta": { + "latestEventTimestamp": "2017-01-01T12:00:00Z", + "earliestEventTimestamp": "2017-01-01T12:00:00Z" + } + } + ], + "entities": [ + { + "spec": { + "name": "__dummy", + "valueType": "STRING", + "joinKey": "__dummy_id" + }, + "meta": { + "createdTimestamp": "2022-02-09T20:40:53.101387Z", + "lastUpdatedTimestamp": "2022-02-09T20:40:53.101387Z" + } + }, + { + "spec": { + "name": "dob_ssn", + "valueType": "STRING", + "description": "Date of birth and last four digits of social security number", + "joinKey": "dob_ssn", + "labels": { + "team": "hack week", + "owner": "tony@tecton.ai" + } + }, + "meta": { + "createdTimestamp": "2022-02-09T20:40:53.101256Z", + "lastUpdatedTimestamp": "2022-02-09T20:40:53.101256Z" + } + }, + { + "spec": { + "name": "zipcode", + "valueType": "INT64", + "description": "A zipcode", + "joinKey": "zipcode", + "labels": { + "owner": "danny@tecton.ai", + "team": "hack week" + } + }, + "meta": { + "createdTimestamp": "2022-02-09T20:40:53.101335Z", + "lastUpdatedTimestamp": "2022-02-09T20:40:53.101335Z" + } + } + ], + "featureViews": [ + { + "spec": { + "name": "credit_history", + "entities": ["dob_ssn"], + "features": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "student_loan_due", + "valueType": "INT64" + }, + { + "name": "vehicle_loan_due", + "valueType": "INT64" + }, + { + "name": "hard_pulls", + "valueType": "INT64" + }, + { + "name": "missed_payments_2y", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + }, + { + "name": "missed_payments_6m", + "valueType": "INT64" + }, + { + "name": "bankruptcies", + "valueType": "INT64" + } + ], + "tags": { + "access_group": "feast-team@tecton.ai", + "experiments": "experiment-A", + "date_added": "2022-02-6" + }, + "ttl": "777600000s", + "batchSource": { + "type": "BATCH_FILE", + "eventTimestampColumn": "event_timestamp", + "createdTimestampColumn": "created_timestamp", + "fileOptions": { + "fileUrl": "data/credit_history.parquet" + }, + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "name": "credit_history", + "meta": { + "latestEventTimestamp": "2021-08-29T22:01:04.746575Z", + "earliestEventTimestamp": "2020-04-26T22:01:04.746575Z" + } + }, + "online": true + }, + "meta": { + "createdTimestamp": "2022-02-09T20:40:53.101460Z", + "lastUpdatedTimestamp": "2022-02-11T20:15:13.735432Z", + "materializationIntervals": [ + { + "startTime": 
"1997-06-20T20:41:14.456417Z", + "endTime": "2019-02-09T20:41:11Z" + }, + { + "startTime": "2019-02-09T20:41:11Z", + "endTime": "2022-02-09T20:42:03Z" + }, + { + "startTime": "2022-02-09T20:42:03Z", + "endTime": "2022-02-11T00:18:02Z" + } + ] + } + }, + { + "spec": { + "name": "zipcode_features", + "entities": ["zipcode"], + "features": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ], + "tags": { + "experiments": "experiment-A,experiment-B,experiment-C", + "date_added": "2022-02-7", + "access_group": "feast-team@tecton.ai" + }, + "ttl": "315360000s", + "batchSource": { + "type": "BATCH_FILE", + "eventTimestampColumn": "event_timestamp", + "createdTimestampColumn": "created_timestamp", + "fileOptions": { + "fileUrl": "data/zipcode_table.parquet" + }, + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "name": "zipcode", + "meta": { + "latestEventTimestamp": "2017-01-01T12:00:00Z", + "earliestEventTimestamp": "2017-01-01T12:00:00Z" + } + }, + "online": true + }, + "meta": { + "createdTimestamp": "2022-02-11T20:12:50.182923Z", + "lastUpdatedTimestamp": "2022-02-11T20:15:21.790447Z" + } + }, + { + "spec": { + "name": "zipcode_money_features", + "entities": ["zipcode"], + "features": [ + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ], + "tags": { + "experiments": "experiment-A,experiment-B,experiment-C", + "access_group": "feast-team@tecton.ai", + "date_added": "2022-02-7", + "test_apple": "2022-02-7", + "test_banana": "2022-02-7", + "test_cherry": "2022-02-7", + "test_danish": "2022-02-7", + "test_eggplant": "2022-02-7", + "test_figs": "2022-02-7", + "test_grape": "2022-02-7", + "test_honey": "2022-02-7", + "test_ice": "2022-02-7", + "test_jackfruit": "2022-02-7", + "test_kiwi_fruit": "2022-02-7", + "test_lychee": "2022-02-7", + "test_mango": "2022-02-7", + "test_orange": "2022-02-7", + "test_peach": "2022-02-7", + "test_question": "2022-02-7", + "test_ruby": "2022-02-7", + "test_starfruit": "2022-02-7", + "test_tamarind": "2022-02-7" + }, + "ttl": "315360000s", + "batchSource": { + "type": "BATCH_FILE", + "eventTimestampColumn": "event_timestamp", + "createdTimestampColumn": "created_timestamp", + "fileOptions": { + "fileUrl": "data/zipcode_table.parquet" + }, + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "name": "zipcode", + "meta": { + "latestEventTimestamp": "2017-01-01T12:00:00Z", + "earliestEventTimestamp": "2017-01-01T12:00:00Z" + } + }, + "online": true + }, + "meta": { + "createdTimestamp": "2022-02-11T20:10:53.228047Z", + "lastUpdatedTimestamp": "2022-02-11T20:15:15.949101Z" + } + } + ], + "featureServices": [ + { + "spec": { + "name": "credit_score_v1", + "features": [ + { + "featureViewName": "credit_history", + "featureColumns": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "zipcode_features", + "featureColumns": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": 
"tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + } + ], + "tags": { + "owner": "tony@tecton.ai", + "stage": "staging" + }, + "description": "Credit scoring model" + }, + "meta": { + "createdTimestamp": "2022-02-11T20:12:50.186773Z" + } + }, + { + "spec": { + "name": "credit_score_v2", + "features": [ + { + "featureViewName": "credit_history", + "featureColumns": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "zipcode_features", + "featureColumns": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "transaction_request_fv", + "featureColumns": [ + { + "name": "transaction_amt", + "valueType": "INT64" + } + ] + } + ], + "tags": { + "stage": "prod", + "owner": "tony@tecton.ai" + }, + "description": "Credit scoring model" + }, + "meta": { + "createdTimestamp": "2022-02-11T20:12:50.185785Z" + } + }, + { + "spec": { + "name": "credit_score_v3", + "features": [ + { + "featureViewName": "credit_history", + "featureColumns": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "zipcode_features", + "featureColumns": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "transaction_gt_last_credit_card_due", + "featureColumns": [ + { + "name": "transaction_gt_last_credit_card_due", + "valueType": "BOOL" + } + ] + } + ], + "tags": { + "stage": "dev", + "owner": "tony@tecton.ai" + }, + "description": "Credit scoring model" + }, + "meta": { + "createdTimestamp": "2022-02-11T20:12:50.186367Z" + } + }, + { + "spec": { + "name": "zipcode_model", + "features": [ + { + "featureViewName": "zipcode_features", + "featureColumns": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + } + ], + "tags": { + "stage": "dev", + "owner": "amanda@tecton.ai" + }, + "description": "Location model" + }, + "meta": { + "createdTimestamp": "2022-02-11T20:12:50.187069Z" + } + }, + { + "spec": { + "name": "zipcode_model_v2", + "features": [ + { + "featureViewName": "zipcode_money_features", + "featureColumns": [ + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + } + ], + "tags": { + "owner": "amanda@tecton.ai", + "stage": "dev" + }, + "description": "Location model" + }, + "meta": { + 
"createdTimestamp": "2022-02-11T20:17:15.582561Z" + } + } + ], + "onDemandFeatureViews": [ + { + "spec": { + "name": "transaction_gt_last_credit_card_due", + "features": [ + { + "name": "transaction_gt_last_credit_card_due", + "valueType": "BOOL" + } + ], + "inputs": { + "transaction": { + "requestDataSource": { + "type": "REQUEST_SOURCE", + "requestDataOptions": { + "schema": { + "transaction_amt": "INT64" + } + }, + "name": "transaction" + } + }, + "credit_history": { + "featureViewProjection": { + "featureViewName": "credit_history", + "featureColumns": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "student_loan_due", + "valueType": "INT64" + }, + { + "name": "vehicle_loan_due", + "valueType": "INT64" + }, + { + "name": "hard_pulls", + "valueType": "INT64" + }, + { + "name": "missed_payments_2y", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + }, + { + "name": "missed_payments_6m", + "valueType": "INT64" + }, + { + "name": "bankruptcies", + "valueType": "INT64" + } + ] + } + } + }, + "userDefinedFunction": { + "name": "transaction_gt_last_credit_card_due", + "body": "@on_demand_feature_view(\n sources={\"credit_history\": credit_history, \"transaction\": input_request,},\n schema=[\n Field(name=\"transaction_gt_last_credit_card_due\", dtype=Bool),\n ],\n)\ndef transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame:\n df = pd.DataFrame()\n df[\"transaction_gt_last_credit_card_due\"] = (\n inputs[\"transaction_amt\"] > inputs[\"credit_card_due\"]\n )\n return df\n" + } + }, + "meta": { + "createdTimestamp": "2022-02-11T20:17:15.581514Z", + "lastUpdatedTimestamp": "2022-02-11T20:17:15.581514Z" + } + } + ], + "requestFeatureViews": [ + { + "spec": { + "name": "transaction_request_fv", + "requestDataSource": { + "type": "REQUEST_SOURCE", + "requestDataOptions": { + "schema": { + "transaction_amt": "INT64" + } + }, + "name": "transaction" + } + } + } + ], + "savedDatasets": [ + { + "spec": { + "name": "my_training_ds", + "features": [ + "credit_history:credit_card_due", + "credit_history:mortgage_due", + "credit_history:missed_payments_1y", + "zipcode_features:city", + "zipcode_features:state", + "zipcode_features:location_type", + "zipcode_features:tax_returns_filed", + "zipcode_features:population", + "zipcode_features:total_wages" + ], + "joinKeys": [ + "person_income", + "person_emp_length", + "created_timestamp", + "zipcode", + "person_home_ownership", + "loan_amnt", + "person_age", + "loan_int_rate", + "loan_status", + "loan_id", + "dob_ssn", + "loan_intent" + ], + "storage": { + "fileStorage": { + "fileFormat": { + "parquetFormat": {} + }, + "fileUrl": "my_training_ds.parquet" + } + }, + "featureService": { + "spec": { + "name": "credit_score_v1", + "features": [ + { + "featureViewName": "credit_history", + "featureColumns": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + } + ] + }, + { + "featureViewName": "zipcode_features", + "featureColumns": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ] + } + ], + "tags": { + 
"stage": "staging", + "owner": "tony@tecton.ai" + }, + "description": "Credit scoring model" + }, + "meta": { + "createdTimestamp": "2022-02-09T20:40:53.103078Z" + } + }, + "profile": "{\"meta\": {\"great_expectations_version\": \"0.14.4\"}, \"expectations\": [{\"meta\": {}, \"kwargs\": {\"column\": \"credit_card_due\", \"min_value\": 0, \"mostly\": 0.99}, \"expectation_type\": \"expect_column_values_to_be_between\"}, {\"meta\": {}, \"kwargs\": {\"column\": \"missed_payments_1y\", \"min_value\": 0, \"max_value\": 5, \"mostly\": 0.99}, \"expectation_type\": \"expect_column_values_to_be_between\"}], \"data_asset_type\": \"Dataset\", \"expectation_suite_name\": \"default\", \"ge_cloud_id\": null}" + }, + "meta": { + "createdTimestamp": "2022-02-09T20:44:03.377806Z", + "minEventTimestamp": "2020-08-25T20:34:41.361Z", + "maxEventTimestamp": "2021-08-25T20:34:41.361Z" + } + } + ], + "infra": [ + { + "infraObjects": [ + { + "infraObjectClassType": "feast.infra.online_stores.sqlite.SqliteTable", + "sqliteTable": { + "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", + "name": "credit_scoring_aws_credit_history" + } + }, + { + "infraObjectClassType": "feast.infra.online_stores.sqlite.SqliteTable", + "sqliteTable": { + "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", + "name": "credit_scoring_aws_zipcode_features" + } + }, + { + "infraObjectClassType": "feast.infra.online_stores.sqlite.SqliteTable", + "sqliteTable": { + "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", + "name": "credit_scoring_aws_zipcode_money_features" + } + } + ] + } + ] +} diff --git a/ui/public/robots.txt b/ui/public/robots.txt new file mode 100644 index 0000000000..e9e57dc4d4 --- /dev/null +++ b/ui/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/ui/rollup.config.js b/ui/rollup.config.js new file mode 100644 index 0000000000..93e17345fe --- /dev/null +++ b/ui/rollup.config.js @@ -0,0 +1,102 @@ +import pluginTypescript from "@rollup/plugin-typescript"; +import pluginCommonjs from "@rollup/plugin-commonjs"; +import pluginNodeResolve from "@rollup/plugin-node-resolve"; +import { babel } from "@rollup/plugin-babel"; +import json from "@rollup/plugin-json"; +import css from "rollup-plugin-import-css"; +import svg from "rollup-plugin-svg"; +import copy from "rollup-plugin-copy"; // https://npm.io/package/rollup-plugin-copy + +import * as path from "path"; +import pkg from "./package.json"; + +const moduleName = pkg.name.replace(/^@.*\//, ""); +const inputFileName = "src/FeastUI.tsx"; +const author = pkg.author; +const banner = ` + /** + * @license + * author: ${author} + * ${moduleName}.js v${pkg.version} + * Released under the ${pkg.license} license. 
+ */ +`; + +const rollupConfig = [ + // ES + { + input: inputFileName, + output: [ + { + file: pkg.module, + format: "es", + sourcemap: "inline", + banner, + exports: "named", + }, + ], + external: [ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.devDependencies || {}), + ], + plugins: [ + pluginTypescript(), + pluginCommonjs({ + extensions: [".js", ".ts"], + }), + babel({ + babelHelpers: "bundled", + configFile: path.resolve(__dirname, ".babelrc.js"), + }), + pluginNodeResolve({ + browser: false, + }), + css({ + output: "feast-ui.css", + }), + svg(), + json(), + copy({ + targets: [{ src: "src/assets/**/*", dest: "dist/assets/" }], + }), + ], + }, + + // CommonJS + { + input: inputFileName, + output: [ + { + file: pkg.main, + format: "cjs", + sourcemap: "inline", + banner, + exports: "default", + }, + ], + external: [ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.devDependencies || {}), + ], + plugins: [ + pluginTypescript(), + pluginCommonjs({ + extensions: [".js", ".ts"], + }), + babel({ + babelHelpers: "bundled", + configFile: path.resolve(__dirname, ".babelrc.js"), + }), + pluginNodeResolve({ + browser: false, + }), + css({ + output: "feast-ui.css", + }), + svg(), + json(), + ], + }, +]; + +export default rollupConfig; diff --git a/ui/sample.png b/ui/sample.png new file mode 100644 index 0000000000..360d57186d Binary files /dev/null and b/ui/sample.png differ diff --git a/ui/src/App.css b/ui/src/App.css new file mode 100644 index 0000000000..4577c6f333 --- /dev/null +++ b/ui/src/App.css @@ -0,0 +1,4 @@ +html { + background: url("assets/feast-icon-white.svg") no-repeat bottom left; + background-size: 20vh; +} diff --git a/ui/src/FeastUI.tsx b/ui/src/FeastUI.tsx new file mode 100644 index 0000000000..628b916f2d --- /dev/null +++ b/ui/src/FeastUI.tsx @@ -0,0 +1,33 @@ +import React from "react"; + +import { BrowserRouter } from "react-router-dom"; +import { QueryClient, QueryClientProvider } from "react-query"; +import { QueryParamProvider } from "use-query-params"; +import RouteAdapter from "./hacks/RouteAdapter"; +import FeastUISansProviders, { FeastUIConfigs } from "./FeastUISansProviders"; + +interface FeastUIProps { + reactQueryClient?: QueryClient; + feastUIConfigs?: FeastUIConfigs; +} + +const defaultQueryClient = new QueryClient(); + +const FeastUI = ({ reactQueryClient, feastUIConfigs }: FeastUIProps) => { + const queryClient = reactQueryClient || defaultQueryClient; + + return ( + + + + + + + + ); +}; + +export default FeastUI; +export type { FeastUIConfigs }; diff --git a/ui/src/FeastUISansProviders.test.tsx b/ui/src/FeastUISansProviders.test.tsx new file mode 100644 index 0000000000..1289cea028 --- /dev/null +++ b/ui/src/FeastUISansProviders.test.tsx @@ -0,0 +1,96 @@ +import React from "react"; + +import { setupServer } from "msw/node"; +import { render } from "./test-utils"; +import { + waitFor, + screen, + waitForElementToBeRemoved, +} from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; + +import FeastUISansProviders from "./FeastUISansProviders"; +import { + projectsListWithDefaultProject, + creditHistoryRegistry, +} from "./mocks/handlers"; + +import registry from "../public/registry.json"; + +// declare which API requests to mock +const server = setupServer( + projectsListWithDefaultProject, + creditHistoryRegistry +); + +// establish API mocking before all tests +beforeAll(() => server.listen()); +// reset any request handlers that are declared as a part of our tests +// (i.e. 
for testing one-time error scenarios) +afterEach(() => server.resetHandlers()); +// clean up once the tests are done +afterAll(() => server.close()); + +test("full app rendering", async () => { + render(); + + // Rendering the app without any paths should mean + // rendering the + // Therefore we should expect to see + // "Welcome to Feast." + const noProjectSelectedElement = screen.getByText(/Welcome to Feast/i); + + expect(noProjectSelectedElement).toBeInTheDocument(); + + // Wait for the Redirect, and check that it got removed + await waitForElementToBeRemoved(noProjectSelectedElement); + + expect(screen.queryByText(/Welcome to Feast/i)).toBeNull(); + + // Explore Panel Should Appear + expect(screen.getByText(/Explore this Project/i)).toBeInTheDocument(); + + const projectNameRegExp = new RegExp(registry.project, "i"); + + // It should load the default project, which is credit_scoring_aws + await waitFor(() => { + expect(screen.getByText(projectNameRegExp)).toBeInTheDocument(); + }); +}); + +const leftClick = { button: 0 }; + +test("routes are reachable", async () => { + render(); + + // Wait for content to load + await screen.findByText(/Explore this Project/i); + + const mainRoutesNames = [ + "Data Sources", + "Entities", + "Feature Views", + "Feature Services", + "Datasets", + ]; + + for (const routeName of mainRoutesNames) { + // Main heading shouldn't start with the route name + expect( + screen.queryByRole("heading", { name: routeName, level: 1 }) + ).toBeNull(); + + const routeRegExp = new RegExp(routeName, "i"); + + userEvent.click( + screen.getByRole("button", { name: routeRegExp }), + leftClick + ); + + // Should land on a page with the heading + screen.getByRole("heading", { + name: routeName, + level: 1, + }); + } +}); diff --git a/ui/src/FeastUISansProviders.tsx b/ui/src/FeastUISansProviders.tsx new file mode 100644 index 0000000000..a6df45b86d --- /dev/null +++ b/ui/src/FeastUISansProviders.tsx @@ -0,0 +1,125 @@ +import React from "react"; + +import "@elastic/eui/dist/eui_theme_light.css"; +import "./index.css"; + +import { Routes, Route } from "react-router-dom"; +import { EuiProvider, EuiErrorBoundary } from "@elastic/eui"; + +import ProjectOverviewPage from "./pages/ProjectOverviewPage"; +import Layout from "./pages/Layout"; +import NoMatch from "./pages/NoMatch"; +import DatasourceIndex from "./pages/data-sources/Index"; +import DatasetIndex from "./pages/saved-data-sets/Index"; +import EntityIndex from "./pages/entities/Index"; +import EntityInstance from "./pages/entities/EntityInstance"; +import FeatureServiceIndex from "./pages/feature-services/Index"; +import FeatureViewIndex from "./pages/feature-views/Index"; +import FeatureViewInstance from "./pages/feature-views/FeatureViewInstance"; +import FeatureServiceInstance from "./pages/feature-services/FeatureServiceInstance"; +import DataSourceInstance from "./pages/data-sources/DataSourceInstance"; +import RootProjectSelectionPage from "./pages/RootProjectSelectionPage"; +import DatasetInstance from "./pages/saved-data-sets/DatasetInstance"; +import NoProjectGuard from "./components/NoProjectGuard"; + +import TabsRegistryContext, { + FeastTabsRegistryInterface, +} from "./custom-tabs/TabsRegistryContext"; +import FeatureFlagsContext, { + FeatureFlags, +} from "./contexts/FeatureFlagsContext"; +import { + ProjectListContext, + ProjectsListContextInterface, +} from "./contexts/ProjectListContext"; + +interface FeastUIConfigs { + tabsRegistry: FeastTabsRegistryInterface; + featureFlags?: FeatureFlags; + 
projectListPromise?: Promise; +} + +const defaultProjectListPromise = () => { + return fetch("/projects-list.json", { + headers: { + "Content-Type": "application/json", + }, + }).then((res) => { + return res.json(); + }); +}; + +const FeastUISansProviders = ({ + feastUIConfigs, +}: { + feastUIConfigs?: FeastUIConfigs; +}) => { + const projectListContext: ProjectsListContextInterface = + feastUIConfigs?.projectListPromise + ? { + projectsListPromise: feastUIConfigs?.projectListPromise, + isCustom: true, + } + : { projectsListPromise: defaultProjectListPromise(), isCustom: false }; + + return ( + + + + + + + }> + } /> + }> + } /> + } /> + } + /> + } + /> + } + /> + } + /> + } + /> + } /> + } + /> + + } /> + } + /> + + + } /> + + + + + + + ); +}; + +export default FeastUISansProviders; +export type { FeastUIConfigs }; diff --git a/ui/src/assets/feast-icon-blue.svg b/ui/src/assets/feast-icon-blue.svg new file mode 100644 index 0000000000..fcb755d99a --- /dev/null +++ b/ui/src/assets/feast-icon-blue.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/ui/src/assets/feast-icon-grey.svg b/ui/src/assets/feast-icon-grey.svg new file mode 100644 index 0000000000..5dca752aa3 --- /dev/null +++ b/ui/src/assets/feast-icon-grey.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/ui/src/assets/feast-icon-white.svg b/ui/src/assets/feast-icon-white.svg new file mode 100644 index 0000000000..c194959447 --- /dev/null +++ b/ui/src/assets/feast-icon-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/ui/src/assets/logo.svg b/ui/src/assets/logo.svg new file mode 100644 index 0000000000..9dfc1c058c --- /dev/null +++ b/ui/src/assets/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/ui/src/components/EuiCustomLink.jsx b/ui/src/components/EuiCustomLink.jsx new file mode 100644 index 0000000000..cf646d43f7 --- /dev/null +++ b/ui/src/components/EuiCustomLink.jsx @@ -0,0 +1,46 @@ +// File name: "EuiCustomLink.js". +import React from "react"; +import { EuiLink } from "@elastic/eui"; +import { useNavigate, useHref } from "react-router-dom"; + +const isModifiedEvent = (event) => + !!(event.metaKey || event.altKey || event.ctrlKey || event.shiftKey); + +const isLeftClickEvent = (event) => event.button === 0; + +const isTargetBlank = (event) => { + const target = event.target.getAttribute("target"); + return target && target !== "_self"; +}; + +export default function EuiCustomLink({ to, ...rest }) { + // This is the key! + const navigate = useNavigate(); + + function onClick(event) { + if (event.defaultPrevented) { + return; + } + + // Let the browser handle links that open new tabs/windows + if ( + isModifiedEvent(event) || + !isLeftClickEvent(event) || + isTargetBlank(event) + ) { + return; + } + + // Prevent regular link behavior, which causes a browser refresh. + event.preventDefault(); + + // Push the route to the history. 
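+    // (react-router's navigate() performs a client-side transition here, so no full page reload)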
+ navigate(to); + } + + // Generate the correct link href (with basename accounted for) + const href = useHref({ pathname: to }); + + const props = { ...rest, href, onClick }; + return ; +} diff --git a/ui/src/components/ExplorePanel.tsx b/ui/src/components/ExplorePanel.tsx new file mode 100644 index 0000000000..1bcc2e9978 --- /dev/null +++ b/ui/src/components/ExplorePanel.tsx @@ -0,0 +1,62 @@ +import React from "react"; + +import { + EuiHorizontalRule, + EuiPanel, + EuiTitle, + EuiBadge, + EuiLoadingContent, + EuiFlexGroup, + EuiFlexItem, + EuiSpacer, +} from "@elastic/eui"; + +import { useNavigate } from "react-router-dom"; +import useFCOExploreSuggestions from "../hooks/useFCOExploreSuggestions"; + +const ExplorePanel = () => { + const { isLoading, isSuccess, data } = useFCOExploreSuggestions(); + + const navigate = useNavigate(); + + return ( + + +
+    <EuiPanel>
+      <EuiTitle size="xs">
+        <h3>Explore this Project</h3>
+      </EuiTitle>
+      <EuiHorizontalRule margin="m" />
+      {isLoading && <EuiLoadingContent />}
+      {isSuccess &&
+        data &&
+        data.map((suggestionGroup, i) => {
+          return (
+            <React.Fragment key={i}>
+              <EuiTitle size="xxs">
+                <h4>{suggestionGroup.title}</h4>
+              </EuiTitle>
+              <EuiSpacer size="s" />
+              <EuiFlexGroup wrap gutterSize="xs">
+                {suggestionGroup.items.map((item, j) => {
+                  return (
+                    <EuiFlexItem key={j} grow={false}>
+                      <EuiBadge
+                        onClick={() => {
+                          navigate(item.link);
+                        }}
+                        onClickAriaLabel={item.label}
+                      >
+                        {item.name} ({item.count})
+                      </EuiBadge>
+                    </EuiFlexItem>
+                  );
+                })}
+              </EuiFlexGroup>
+              <EuiSpacer size="m" />
+            </React.Fragment>
+          );
+        })}
+    </EuiPanel>
+ ); +}; + +export default ExplorePanel; diff --git a/ui/src/components/FeaturesInServiceDisplay.tsx b/ui/src/components/FeaturesInServiceDisplay.tsx new file mode 100644 index 0000000000..39091b81de --- /dev/null +++ b/ui/src/components/FeaturesInServiceDisplay.tsx @@ -0,0 +1,71 @@ +import React from "react"; +import { z } from "zod"; +import { EuiBasicTable } from "@elastic/eui"; +import { FeastFeatureInServiceType } from "../parsers/feastFeatureServices"; +import EuiCustomLink from "./EuiCustomLink"; +import { FEAST_FEATURE_VALUE_TYPES } from "../parsers/types"; +import { useParams } from "react-router-dom"; + +interface FeatureViewsListInterace { + featureViews: FeastFeatureInServiceType[]; +} + +const FeaturesInServiceList = ({ featureViews }: FeatureViewsListInterace) => { + const { projectName } = useParams(); + + const FeatureInService = z.object({ + featureViewName: z.string(), + featureColumnName: z.string(), + valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), + }); + type FeatureInServiceType = z.infer; + + var items: FeatureInServiceType[] = []; + featureViews.forEach((featureView) => { + featureView.featureColumns.forEach((featureColumn) => { + const row: FeatureInServiceType = { + featureViewName: featureView.featureViewName, + featureColumnName: featureColumn.name, + valueType: featureColumn.valueType, + }; + items.push(row); + }); + }); + + const columns = [ + { + name: "Feature View", + field: "featureViewName", + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "Feature Column", + field: "featureColumnName", + }, + { + name: "Value Type", + field: "valueType", + }, + ]; + + const getRowProps = (item: FeatureInServiceType) => { + return { + "data-test-subj": `row-${item.featureViewName}`, + }; + }; + + return ( + + ); +}; + +export default FeaturesInServiceList; diff --git a/ui/src/components/FeaturesListDisplay.tsx b/ui/src/components/FeaturesListDisplay.tsx new file mode 100644 index 0000000000..abd9c1d2e4 --- /dev/null +++ b/ui/src/components/FeaturesListDisplay.tsx @@ -0,0 +1,80 @@ +import React, { useContext } from "react"; +import { EuiBasicTable, EuiLoadingSpinner, EuiBadge } from "@elastic/eui"; +import { FeastFeatureColumnType } from "../parsers/feastFeatureViews"; +import useLoadFeatureViewSummaryStatistics from "../queries/useLoadFeatureViewSummaryStatistics"; +import SparklineHistogram from "./SparklineHistogram"; +import FeatureFlagsContext from "../contexts/FeatureFlagsContext"; + +interface FeaturesListProps { + featureViewName: string; + features: FeastFeatureColumnType[]; +} + +const FeaturesList = ({ featureViewName, features }: FeaturesListProps) => { + const { enabledFeatureStatistics } = useContext(FeatureFlagsContext); + const { isLoading, isError, isSuccess, data } = + useLoadFeatureViewSummaryStatistics(featureViewName); + + let columns: { name: string; render?: any; field: any }[] = [ + { name: "Name", field: "name" }, + { + name: "Value Type", + field: "valueType", + }, + ]; + + if (enabledFeatureStatistics) { + columns.push( + ...[ + { + name: "Sample", + field: "", + render: (item: FeastFeatureColumnType) => { + const statistics = + isSuccess && data && data.columnsSummaryStatistics[item.name]; + + return ( + + {isLoading && } + {isError && ( + error loading samples + )} + {statistics && statistics.sampleValues.join(",")} + + ); + }, + }, + { + name: "Sparklines", + field: "", + render: (item: FeastFeatureColumnType) => { + const statistics = + isSuccess && data && data.columnsSummaryStatistics[item.name]; + + if 
( + statistics && + statistics.valueType === "INT64" && + statistics.histogram + ) { + return ; + } else { + return ""; + } + }, + }, + ] + ); + } + + const getRowProps = (item: FeastFeatureColumnType) => { + return { + "data-test-subj": `row-${item.name}`, + }; + }; + + return ( + + ); +}; + +export default FeaturesList; diff --git a/ui/src/components/NoProjectGuard.tsx b/ui/src/components/NoProjectGuard.tsx new file mode 100644 index 0000000000..8501f6c931 --- /dev/null +++ b/ui/src/components/NoProjectGuard.tsx @@ -0,0 +1,72 @@ +import { EuiEmptyPrompt, EuiLoadingContent } from "@elastic/eui"; +import React, { useContext } from "react"; +import { Outlet, useParams } from "react-router-dom"; +import { + ProjectListContext, + useLoadProjectsList, +} from "../contexts/ProjectListContext"; +import ProjectSelector from "./ProjectSelector"; + +const NoProjectGuard = () => { + const { projectName } = useParams(); + + const { isLoading, isError, data } = useLoadProjectsList(); + const projectListContext = useContext(ProjectListContext); + + if (isLoading && !data) { + return ; + } + + if (isError) { + return ( + Error Loading Project List} + body={ + projectListContext?.isCustom ? ( +
+            <p>
+              Unable to fetch project list. Check the promise provided to Feast
+              UI in <code>projectListPromise</code>.
+            </p>
+          ) : (
+            <p>
+              Unable to find <code>projects-list.json</code>. Check that you
+              have a project list file defined.
+            </p>
+          )
+        }
+      />
+    );
+  }
+
+  const currentProject = data?.projects.find((project) => {
+    return project.id === projectName;
+  });
+
+  if (currentProject === undefined) {
+    return (
+      <EuiEmptyPrompt
+        title={<h2>Error Loading Project</h2>}
+        body={
+          <>
+            <p>
+              There is no project with id {projectName} in{" "}
+              <code>projects-list.json</code>. Check that you have the correct
+              project id.
+            </p>
+            <p>
+              You can also select one of the projects in the following list:
+            </p>
+            <ProjectSelector />
+          </>
+ } + /> + ); + } + + return ; +}; + +export default NoProjectGuard; diff --git a/ui/src/components/NumericFeaturesTable.tsx b/ui/src/components/NumericFeaturesTable.tsx new file mode 100644 index 0000000000..7c55f5ddba --- /dev/null +++ b/ui/src/components/NumericFeaturesTable.tsx @@ -0,0 +1,60 @@ +import { EuiBasicTable } from "@elastic/eui"; +import React from "react"; +import { NumericColumnSummaryStatisticType } from "../parsers/featureViewSummaryStatistics"; +import SparklineHistogram from "./SparklineHistogram"; + +interface NumericFeaturesTableProps { + data: NumericColumnSummaryStatisticType[]; +} + +const NumericFeaturesTable = ({ data }: NumericFeaturesTableProps) => { + const columns = [ + { name: "Name", field: "name" }, + { + name: "Value Type", + field: "valueType", + }, + { + name: "Sample", + render: (statistics: NumericColumnSummaryStatisticType) => { + return ( + + {statistics && statistics.sampleValues.join(",")} + + ); + }, + }, + { + name: "Min/Max", + render: (statistics: NumericColumnSummaryStatisticType) => { + return statistics.min !== undefined && statistics.max !== undefined + ? `${statistics.min}/${statistics.max}` + : undefined; + }, + }, + { name: "zeros", field: "proportionOfZeros" }, + { name: "missing", field: "proportionMissing" }, + { + name: "Sparklines", + render: (statistics: NumericColumnSummaryStatisticType) => { + if (statistics && statistics.histogram) { + return ; + } else { + return ""; + } + }, + }, + ]; + + const getRowProps = (item: NumericColumnSummaryStatisticType) => { + return { + "data-test-subj": `row-${item.name}`, + }; + }; + + return ( + + ); +}; + +export default NumericFeaturesTable; diff --git a/ui/src/components/ObjectsCountStats.tsx b/ui/src/components/ObjectsCountStats.tsx new file mode 100644 index 0000000000..bf1dd2dc9d --- /dev/null +++ b/ui/src/components/ObjectsCountStats.tsx @@ -0,0 +1,98 @@ +import React, { useContext } from "react"; +import { + EuiFlexGroup, + EuiFlexItem, + EuiStat, + EuiHorizontalRule, + EuiTitle, + EuiSpacer, +} from "@elastic/eui"; +import useLoadRegistry from "../queries/useLoadRegistry"; +import { useNavigate, useParams } from "react-router-dom"; +import RegistryPathContext from "../contexts/RegistryPathContext"; + +const useLoadObjectStats = () => { + const registryUrl = useContext(RegistryPathContext); + const query = useLoadRegistry(registryUrl); + + const data = + query.isSuccess && query.data + ? { + featureServices: query.data.objects.featureServices?.length || 0, + featureViews: query.data.mergedFVList.length, + entities: query.data.objects.entities?.length || 0, + dataSources: query.data.objects.dataSources?.length || 0, + } + : undefined; + + return { + ...query, + data, + }; +}; + +const statStyle = { cursor: "pointer" }; + +const ObjectsCountStats = () => { + const { isLoading, isSuccess, isError, data } = useLoadObjectStats(); + const { projectName } = useParams(); + + const navigate = useNavigate(); + + return ( + + + + {isLoading &&
+        <EuiTitle size="xs">
+          <h4>Loading</h4>
+        </EuiTitle>}
+      {isError &&
+        <EuiTitle size="xs">
+          <h4>There was an error in loading registry information.</h4>
+        </EuiTitle>}
+      {isSuccess && data && (
+        <>
+          <EuiHorizontalRule margin="m" />
+          <EuiTitle size="xs">
+            <h4>Registered in this Feast project are …</h4>
+          </EuiTitle>
+          <EuiSpacer size="s" />
+          <EuiFlexGroup>
+            <EuiFlexItem>
+              <EuiStat
+                style={statStyle}
+                onClick={() => navigate(`/p/${projectName}/feature-service`)}
+                description="Feature Services→"
+                title={data.featureServices}
+                reverse
+              />
+            </EuiFlexItem>
+            <EuiFlexItem>
+              <EuiStat
+                style={statStyle}
+                description="Feature Views→"
+                onClick={() => navigate(`/p/${projectName}/feature-view`)}
+                title={data.featureViews}
+                reverse
+              />
+            </EuiFlexItem>
+            <EuiFlexItem>
+              <EuiStat
+                style={statStyle}
+                description="Entities→"
+                onClick={() => navigate(`/p/${projectName}/entity`)}
+                title={data.entities}
+                reverse
+              />
+            </EuiFlexItem>
+            <EuiFlexItem>
+              <EuiStat
+                style={statStyle}
+                description="Data Sources→"
+                onClick={() => navigate(`/p/${projectName}/data-source`)}
+                title={data.dataSources}
+                reverse
+              />
+            </EuiFlexItem>
+          </EuiFlexGroup>
+        </>
+      )}
+    </>
+ ); +}; + +export default ObjectsCountStats; diff --git a/ui/src/components/ProjectSelector.test.tsx b/ui/src/components/ProjectSelector.test.tsx new file mode 100644 index 0000000000..0032b88839 --- /dev/null +++ b/ui/src/components/ProjectSelector.test.tsx @@ -0,0 +1,79 @@ +import { setupServer } from "msw/node"; +import { render } from "../test-utils"; +import { screen, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; + +import FeastUISansProviders from "../FeastUISansProviders"; + +import { + projectsListWithDefaultProject, + creditHistoryRegistry, +} from "../mocks/handlers"; + +// declare which API requests to mock +const server = setupServer( + projectsListWithDefaultProject, + creditHistoryRegistry +); + +// establish API mocking before all tests +beforeAll(() => server.listen()); +// reset any request handlers that are declared as a part of our tests +// (i.e. for testing one-time error scenarios) +afterEach(() => server.resetHandlers()); +// clean up once the tests are done +afterAll(() => server.close()); + +test("in a full App render, it shows the right initial project", async () => { + render(); + + const select = await screen.findByRole("combobox", { + name: "Select a Feast Project", + }); + + // Wait for Project List to Load + const options = await within(select).findAllByRole("option"); + + const topLevelNavigation = await screen.findByRole("navigation", { + name: "Top Level", + }); + + within(topLevelNavigation).getByDisplayValue("Credit Score Project"); + + expect(options.length).toBe(1); + + // Wait for Project Data from Registry to Load + await screen.findAllByRole("heading", { + name: /Project:/i, + }); + + // Before User Event: Heading is the credit scoring project + screen.getByRole("heading", { + name: /credit_scoring_aws/i, + }); + + // Do the select option user event + // https://stackoverflow.com/a/69478957 + userEvent.selectOptions( + // Find the select element + within(topLevelNavigation).getByRole("combobox"), + // Find and select the Ireland option + within(topLevelNavigation).getByRole("option", { + name: "Credit Score Project", + }) + ); + + // The selection should updated + expect( + within(topLevelNavigation).getByRole("option", { + name: "Credit Score Project", + selected: true, + }) + ).toBeInTheDocument(); + + // ... 
and the new heading should appear + // meaning we successfully navigated + await screen.findByRole("heading", { + name: /credit_scoring_aws/i, + }); +}); diff --git a/ui/src/components/ProjectSelector.tsx b/ui/src/components/ProjectSelector.tsx new file mode 100644 index 0000000000..1bb7ebf85a --- /dev/null +++ b/ui/src/components/ProjectSelector.tsx @@ -0,0 +1,42 @@ +import { EuiSelect, useGeneratedHtmlId } from "@elastic/eui"; +import React from "react"; +import { useNavigate, useParams } from "react-router-dom"; +import { useLoadProjectsList } from "../contexts/ProjectListContext"; + +const ProjectSelector = () => { + const { projectName } = useParams(); + const navigate = useNavigate(); + + const { isLoading, data } = useLoadProjectsList(); + + const currentProject = data?.projects.find((project) => { + return project.id === projectName; + }); + + const options = data?.projects.map((p) => { + return { + value: p.id, + text: p.name, + }; + }); + + const basicSelectId = useGeneratedHtmlId({ prefix: "basicSelect" }); + const onChange = (e: React.ChangeEvent) => { + navigate(`/p/${e.target.value}`); + }; + + return ( + onChange(e)} + aria-label="Select a Feast Project" + /> + ); +}; + +export default ProjectSelector; diff --git a/ui/src/components/SearchTokensList.tsx b/ui/src/components/SearchTokensList.tsx new file mode 100644 index 0000000000..c382298d5a --- /dev/null +++ b/ui/src/components/SearchTokensList.tsx @@ -0,0 +1,38 @@ +import React from "react"; +import { EuiBadge, EuiFlexGroup, EuiFlexItem } from "@elastic/eui"; + +interface SearchTokensListProps { + tokens: string[]; + removeTokenByPosition: (tokenPosition: number) => void; +} + +const SearchTokensList = ({ + tokens, + removeTokenByPosition, +}: SearchTokensListProps) => { + return ( + + {tokens.map((token, index) => { + const badgeColor = token.indexOf(":") > 0 ? 
"primary" : "hollow"; + + return ( + + { + removeTokenByPosition(index); + }} + iconOnClickAriaLabel="Example of onClick event for icon within the button" + > + {token} + + + ); + })} + + ); +}; + +export default SearchTokensList; diff --git a/ui/src/components/SparklineHistogram.tsx b/ui/src/components/SparklineHistogram.tsx new file mode 100644 index 0000000000..bd632ec20d --- /dev/null +++ b/ui/src/components/SparklineHistogram.tsx @@ -0,0 +1,58 @@ +import React from "react"; +import { HistogramDataType } from "../parsers/featureViewSummaryStatistics"; +import { extent } from "d3-array"; +import { scaleLinear } from "d3"; +import { EuiBadge, useEuiTheme } from "@elastic/eui"; + +interface SparklineHistogramProps { + data: HistogramDataType; +} + +const SparklineHistogram = ({ data }: SparklineHistogramProps) => { + const width = 100; + const height = 24; + + const yMax = height - 2; + + const { euiTheme } = useEuiTheme(); + + if (data.length > 0) { + const x0Extent = extent(data, (d) => d.x0) as [number, number]; + const xScale = scaleLinear() + .domain(x0Extent) + .range([0, width - width / data.length]); + + const yExtent = extent(data, (d) => d.count) as [number, number]; + const yScale = scaleLinear().domain(yExtent).range([0, yMax]); + + return ( + + + {data.map((d) => { + const barHeight = yScale(d.count); + + return ( + + ); + })} + + ); + } else { + return histogram n/a; + } +}; + +export default SparklineHistogram; diff --git a/ui/src/components/TagSearch.tsx b/ui/src/components/TagSearch.tsx new file mode 100644 index 0000000000..e89d4a44cc --- /dev/null +++ b/ui/src/components/TagSearch.tsx @@ -0,0 +1,221 @@ +import { EuiTitle, EuiInputPopover, EuiSelectable } from "@elastic/eui"; + +import React, { useEffect, useRef, useState } from "react"; +import { + SuggestionModes, + TagSuggestionInstance, +} from "../hooks/useSearchInputWithTags"; + +interface TagSearchInterface { + currentTag: string; + tagsString: string; + setTagsString: (tagsString: string) => void; + acceptSuggestion: (suggestion: TagSuggestionInstance) => void; + tagSuggestions: TagSuggestionInstance[]; + suggestionMode: SuggestionModes; + setCursorPosition: (position: number | undefined) => void; +} + +interface SelectableOption { + label: string; + checked?: "on" | "off" | undefined; + suggestion: TagSuggestionInstance; +} + +// Helper Functions +const suggestionFormatter = (item: TagSuggestionInstance) => { + return { + label: item.suggestion, + suggestion: item, + showIcons: false, + append: {item.description}, + }; +}; + +const getCursorPosition = ( + inputNode: React.MutableRefObject +) => { + return inputNode.current?.selectionStart || undefined; +}; + +const computePlaceholderText = ( + tagSuggestions: TagSuggestionInstance[] | undefined +) => { + return !tagSuggestions + ? "" + : "e.g. " + + tagSuggestions + .slice(0, 2) + .map((s) => `"${s.suggestion}"`) + .join(" or "); +}; + +const generateResultsCount = ( + currentTag: string, + suggestionMode: SuggestionModes, + tagSuggestions: TagSuggestionInstance[] +) => { + let resultsCount = undefined; + + const currentTagIsEmpty = currentTag.length <= 0; + const currentTagHasNoValue = currentTag.split(":")[1] === ""; + const operatingWord = + currentTagIsEmpty || currentTagHasNoValue ? "possible" : "matching"; + const counterWord = suggestionMode === "KEY" ? `key` : `value`; + + if (tagSuggestions.length > 0) { + const isPlural = tagSuggestions.length > 1 ? 
"s" : ""; + resultsCount = ( + {`${tagSuggestions.length} ${operatingWord} ${counterWord}${isPlural}`} + ); + } + + return resultsCount; +}; + +// Hooks +const useInputHack = ( + setTagsString: (s: string) => void, + setCursorPosition: (n: number | undefined) => void +) => { + // HACK --- route around the lack of onChange + // See: https://github.com/elastic/eui/issues/5651 + const inputNode = useRef(null); + useEffect(() => { + const cb = () => { + const s: string = inputNode.current?.value || ""; + + setTagsString(s); + setCursorPosition(getCursorPosition(inputNode)); + }; + + const copiedNode = inputNode.current; + + if (copiedNode) { + copiedNode.addEventListener("input", cb); + } + + return () => { + if (copiedNode) { + copiedNode.removeEventListener("input", cb); + } + }; + }, [inputNode, setTagsString, setCursorPosition]); + + return inputNode; +}; + +const useSelectableOptions = ( + tagSuggestions: TagSuggestionInstance[], + acceptSuggestion: (suggestion: TagSuggestionInstance) => void +) => { + const [options, setOptions] = useState( + tagSuggestions ? tagSuggestions.map(suggestionFormatter) : [] + ); + + const onSelectableChange = (options: SelectableOption[]) => { + // Get the thing that just got "checked" + const clickedItem = options.find((option) => option.checked === "on"); + + if (clickedItem) { + acceptSuggestion(clickedItem.suggestion); + } + + setOptions(options); + }; + + useEffect(() => { + // Update options when new set of suggestions are passed down + setOptions(tagSuggestions.map(suggestionFormatter)); + }, [tagSuggestions, setOptions]); + + return { + options, + onSelectableChange, + }; +}; + +const TagSearch = ({ + currentTag, + tagsString, + setTagsString, + acceptSuggestion, + tagSuggestions, + suggestionMode, + setCursorPosition, +}: TagSearchInterface) => { + // HACK --- route around the lack of onChange + const inputNode = useInputHack(setTagsString, setCursorPosition); + + // Handling Suggestion Options + const { options, onSelectableChange } = useSelectableOptions( + tagSuggestions, // Gets turned into options + acceptSuggestion // Get triggered when an option is selected + ); + + // Using EuiInputPopover: https://elastic.github.io/eui/#/layout/popover + const [hasFocus, setHasFocus] = useState(false); + + // Props for EuiFieldSearch + const searchProps = { + value: tagsString, + inputRef: (node: HTMLInputElement | null) => { + // HTMLInputElement is hooked into useInputHack + inputNode.current = node; + }, + onfocus: () => { + setHasFocus(true); + }, + fullWidth: true, + placeholder: computePlaceholderText(tagSuggestions), + }; + + const resultsCount = generateResultsCount( + currentTag, + suggestionMode, + tagSuggestions + ); + + return ( + <> + +
+      <EuiTitle size="xxs">
+        <h4>Filter by Tags</h4>
+      </EuiTitle>
+      <EuiSelectable
+        onFocus={() =>
+ { + setHasFocus(true); + }} + onBlur={() => { + setHasFocus(false); + }} + searchable={true} + isPreFiltered={true} + searchProps={searchProps} + aria-label="Filter by " + onChange={onSelectableChange} + options={options} + singleSelection={true} + listProps={{ bordered: true }} + > + {(list, search) => { + return ( + {search}} + isOpen={hasFocus} + closePopover={() => { + setHasFocus(false); + }} + > + {resultsCount} + {list} + + ); + }} + + + ); +}; + +export default TagSearch; diff --git a/ui/src/components/TagsDisplay.tsx b/ui/src/components/TagsDisplay.tsx new file mode 100644 index 0000000000..6f7f23b007 --- /dev/null +++ b/ui/src/components/TagsDisplay.tsx @@ -0,0 +1,37 @@ +import React from "react"; +import { + EuiDescriptionList, + EuiDescriptionListDescription, + EuiDescriptionListTitle, +} from "@elastic/eui"; +import EuiCustomLink from "./EuiCustomLink"; + +interface TagsDisplayProps { + createLink?: (key: string, value: string) => string; + tags: Record; +} + +const TagsDisplay = ({ tags, createLink }: TagsDisplayProps) => { + return ( + + {Object.entries(tags).map(([key, value]) => { + return ( + + {key} + + {createLink ? ( + + {value} + + ) : ( + value + )} + + + ); + })} + + ); +}; + +export default TagsDisplay; diff --git a/ui/src/contexts/FeatureFlagsContext.ts b/ui/src/contexts/FeatureFlagsContext.ts new file mode 100644 index 0000000000..4b065d2075 --- /dev/null +++ b/ui/src/contexts/FeatureFlagsContext.ts @@ -0,0 +1,10 @@ +import React from "react"; + +interface FeatureFlags { + enabledFeatureStatistics?: boolean; +} + +const FeatureFlagsContext = React.createContext({}); + +export default FeatureFlagsContext; +export type { FeatureFlags }; diff --git a/ui/src/contexts/ProjectListContext.ts b/ui/src/contexts/ProjectListContext.ts new file mode 100644 index 0000000000..3ae8b57298 --- /dev/null +++ b/ui/src/contexts/ProjectListContext.ts @@ -0,0 +1,131 @@ +import React, { useContext, useState } from "react"; +import { useQuery } from "react-query"; + +import { z } from "zod"; + +const ProjectEntrySchema = z.object({ + id: z.string(), + name: z.string(), + description: z.string().optional(), + registryPath: z.string(), +}); + +const ProjectsListSchema = z.object({ + default: z.string().optional(), + projects: z.array(ProjectEntrySchema), +}); + +type ProjectsListType = z.infer; +interface ProjectsListContextInterface { + projectsListPromise: Promise; + isCustom: boolean; +} + +const ProjectListContext = React.createContext< + ProjectsListContextInterface | undefined +>(undefined); + +class ProjectListError extends Error { + constructor(message?: string | undefined) { + super(message); + this.name = "FeastProjectListError"; + } +} + +const projectListExampleString = ` + +\`\`\`json +{ + "projects": [ + { + "name": "Credit Score Project", + "description": "Project for credit scoring team and associated models.", + "id": "credit_score_project", + "registryPath": "/registry.json" + } + ] +} +\`\`\` +`; + +const anticipatedProjectListErrors = ( + err: Error, + isCustomProjectList: boolean +) => { + const isSyntaxError = err.stack?.indexOf("SyntaxError") === 0; + + // Straight up not a JSON + if (isSyntaxError) { + const message = `Unable to properly parse Project List JSON. Check that your project list is formatted properly.`; + + return new ProjectListError(message); + } + + // Some sort of 404 + const isFailedToFetch = err.message.indexOf("Failed to fetch") > -1; + if (isFailedToFetch) { + const followUpMessage = isCustomProjectList + ? 
"Check that the promise in your Feast UI configuration is set up properly." + : "Did you create a `project-list.json` file in the `/public/` directory? e.g." + + projectListExampleString; + + const message = "Failed to fetch Project List JSON. " + followUpMessage; + + return new ProjectListError(message); + } + + return null; +}; + +const useLoadProjectsList = () => { + const projectListPromise = useContext(ProjectListContext); + // Use setState to surface errors in Error Boundaries + // https://github.com/facebook/react/issues/14981#issuecomment-468460187 + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const [_, setQueryError] = useState(undefined); + + return useQuery( + "feast-projects-list", + () => { + return projectListPromise?.projectsListPromise + .catch((e) => { + const anticipatedError = anticipatedProjectListErrors( + e, + projectListPromise.isCustom + ); + setQueryError(() => { + if (anticipatedError) { + throw anticipatedError; + } else { + throw new Error(e); + } + }); + }) + .then((json) => { + try { + const configs = ProjectsListSchema.parse(json); + return configs; + } catch (e) { + // If a json object is returned, but + // does not adhere to our anticipated + // format. + setQueryError(() => { + throw new ProjectListError( + `Error parsing project list JSON. JSON Object does not match expected type for a Feast project list. A project list JSON file should look like + ${projectListExampleString} + Zod (our parser) returned the following: \n\n${e}` + ); + }); + + throw new Error("Zod Error"); + } + }); + }, + { + enabled: !!projectListPromise?.projectsListPromise, + } + ); +}; + +export { ProjectListContext, ProjectsListSchema, useLoadProjectsList }; +export type { ProjectsListType, ProjectsListContextInterface }; diff --git a/ui/src/contexts/RegistryPathContext.ts b/ui/src/contexts/RegistryPathContext.ts new file mode 100644 index 0000000000..89eac2a7cd --- /dev/null +++ b/ui/src/contexts/RegistryPathContext.ts @@ -0,0 +1,5 @@ +import React from "react"; + +const RegistryPathContext = React.createContext("/registry.json"); + +export default RegistryPathContext; diff --git a/ui/src/custom-tabs/TabsRegistryContext.tsx b/ui/src/custom-tabs/TabsRegistryContext.tsx new file mode 100644 index 0000000000..8ad58d7a16 --- /dev/null +++ b/ui/src/custom-tabs/TabsRegistryContext.tsx @@ -0,0 +1,257 @@ +import React, { useEffect, useState } from "react"; + +import { + useResolvedPath, + resolvePath, + useLocation, + NavigateFunction, + Route, +} from "react-router-dom"; + +import RegularFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper"; +import OnDemandFeatureViewCustomTabLoadingWrapper from "../utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper"; +import FeatureServiceCustomTabLoadingWrapper from "../utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper"; +import DataSourceCustomTabLoadingWrapper from "../utils/custom-tabs/DataSourceCustomTabLoadingWrapper"; +import EntityCustomTabLoadingWrapper from "../utils/custom-tabs/EntityCustomTabLoadingWrapper"; +import DatasetCustomTabLoadingWrapper from "../utils/custom-tabs/DatasetCustomTabLoadingWrapper"; + +import { + RegularFeatureViewCustomTabRegistrationInterface, + OnDemandFeatureViewCustomTabRegistrationInterface, + FeatureServiceCustomTabRegistrationInterface, + DataSourceCustomTabRegistrationInterface, + EntityCustomTabRegistrationInterface, + DatasetCustomTabRegistrationInterface, + CustomTabRegistrationInterface, +} from "./types"; + +interface 
FeastTabsRegistryInterface { + RegularFeatureViewCustomTabs?: RegularFeatureViewCustomTabRegistrationInterface[]; + OnDemandFeatureViewCustomTabs?: OnDemandFeatureViewCustomTabRegistrationInterface[]; + FeatureServiceCustomTabs?: FeatureServiceCustomTabRegistrationInterface[]; + DataSourceCustomTabs?: DataSourceCustomTabRegistrationInterface[]; + EntityCustomTabs?: EntityCustomTabRegistrationInterface[]; + DatasetCustomTabs?: DatasetCustomTabRegistrationInterface[]; +} + +interface NavigationTabInterface { + label: string; + isSelected: boolean; + onClick: () => void; +} + +const TabsRegistryContext = React.createContext({}); + +const useGenericCustomTabsNavigation = < + T extends CustomTabRegistrationInterface +>( + entries: T[], + navigate: NavigateFunction +) => { + // Check for Duplicates + const arrayOfPaths = entries.map((tab) => tab.path); + + const duplicatedPaths = arrayOfPaths.filter( + (item, index) => arrayOfPaths.indexOf(item) !== index + ); + + // Throw error if multiple custom tabs being registered to the same path + if (duplicatedPaths.length) { + throw new Error( + `More than one tabs registered for path url: ${duplicatedPaths.join( + ", " + )}` + ); + } + + const [customNavigationTabs, setTabs] = useState( + [] + ); + + const featureViewRoot = useResolvedPath(""); // Root of Feature View Section + const { pathname } = useLocation(); // Current Location + + useEffect(() => { + setTabs( + entries.map(({ label, path }) => { + const resolvedTabPath = resolvePath(path, featureViewRoot.pathname); + + return { + label, + // Can't use the match hooks here b/c we're in a loop due + // to React hooks needing a predictable number of + // hooks to be run. See: https://reactjs.org/docs/hooks-rules.html + isSelected: pathname === resolvedTabPath.pathname, + onClick: () => { + navigate(path); + }, + }; + }) + ); + }, [pathname, navigate, featureViewRoot.pathname, entries]); + + return { + customNavigationTabs, + }; +}; + +// Creating Routes +interface InnerComponent { + label: string; + path: string; + Component: (props: T) => JSX.Element; +} +type WrapperComponentType = ({ + Component, +}: { + Component: (props: T) => JSX.Element; +}) => JSX.Element; + +const genericCustomTabRoutes = ( + tabs: InnerComponent[], + WrapperComponent: WrapperComponentType +) => { + return tabs.map(({ path, Component }) => { + const WrappedComponent = () => { + return ; + }; + + return ( + } /> + ); + }); +}; + +// Navigation Hooks for Each Custom Tab Type +const useRegularFeatureViewCustomTabs = (navigate: NavigateFunction) => { + const { RegularFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + RegularFeatureViewCustomTabs || [], + navigate + ); +}; + +const useOnDemandFeatureViewCustomTabs = (navigate: NavigateFunction) => { + const { OnDemandFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + OnDemandFeatureViewCustomTabs || [], + navigate + ); +}; + +const useFeatureServiceCustomTabs = (navigate: NavigateFunction) => { + const { FeatureServiceCustomTabs } = React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + FeatureServiceCustomTabs || [], + navigate + ); +}; + +const useDataSourceCustomTabs = (navigate: NavigateFunction) => { + const { DataSourceCustomTabs } = React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + DataSourceCustomTabs || [], + navigate + ); +}; + +const useEntityCustomTabs = (navigate: NavigateFunction) => 
{ + const { EntityCustomTabs } = React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + EntityCustomTabs || [], + navigate + ); +}; + +const useDatasetCustomTabs = (navigate: NavigateFunction) => { + const { DatasetCustomTabs } = React.useContext(TabsRegistryContext); + + return useGenericCustomTabsNavigation( + DatasetCustomTabs || [], + navigate + ); +}; + +// Routes for Each Custom Tab Type +const useRegularFeatureViewCustomTabRoutes = () => { + const { RegularFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + RegularFeatureViewCustomTabs || [], + RegularFeatureViewCustomTabLoadingWrapper + ); +}; + +const useOnDemandFeatureViewCustomTabRoutes = () => { + const { OnDemandFeatureViewCustomTabs } = + React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + OnDemandFeatureViewCustomTabs || [], + OnDemandFeatureViewCustomTabLoadingWrapper + ); +}; + +const useFeatureServiceCustomTabRoutes = () => { + const { FeatureServiceCustomTabs } = React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + FeatureServiceCustomTabs || [], + FeatureServiceCustomTabLoadingWrapper + ); +}; + +const useDataSourceCustomTabRoutes = () => { + const { DataSourceCustomTabs } = React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + DataSourceCustomTabs || [], + DataSourceCustomTabLoadingWrapper + ); +}; + +const useEntityCustomTabRoutes = () => { + const { EntityCustomTabs } = React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + EntityCustomTabs || [], + EntityCustomTabLoadingWrapper + ); +}; + +const useDatasetCustomTabRoutes = () => { + const { DatasetCustomTabs } = React.useContext(TabsRegistryContext); + + return genericCustomTabRoutes( + DatasetCustomTabs || [], + DatasetCustomTabLoadingWrapper + ); +}; + +export default TabsRegistryContext; +export { + // Navigation + useRegularFeatureViewCustomTabs, + useOnDemandFeatureViewCustomTabs, + useFeatureServiceCustomTabs, + useDataSourceCustomTabs, + useEntityCustomTabs, + useDatasetCustomTabs, + // Routes + useRegularFeatureViewCustomTabRoutes, + useOnDemandFeatureViewCustomTabRoutes, + useFeatureServiceCustomTabRoutes, + useDataSourceCustomTabRoutes, + useEntityCustomTabRoutes, + useDatasetCustomTabRoutes, +}; + +export type { FeastTabsRegistryInterface }; diff --git a/ui/src/custom-tabs/data-source-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/data-source-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..79ba01d7aa --- /dev/null +++ b/ui/src/custom-tabs/data-source-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,82 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + DataSourceCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ id, feastObjectQuery }: DataSourceCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. 
+ // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
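+    // Props contract note: every custom tab receives `id` (the object name
+    // from the route) and `feastObjectQuery` (Feast UI's own query for that
+    // object), so a tab that only needs Feast's data can skip fetching
+    // entirely. A minimal sketch under the same DataSourceCustomTabProps
+    // contract (MinimalTab is a hypothetical name used for illustration):
+    //
+    //   const MinimalTab = ({ feastObjectQuery }: DataSourceCustomTabProps) =>
+    //     feastObjectQuery.isSuccess ? (
+    //       <EuiCode>
+    //         <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+    //       </EuiCode>
+    //     ) : (
+    //       <EuiLoadingContent />
+    //     );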
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/data-source-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/data-source-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/data-source-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/dataset-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/dataset-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..37038b84a9 --- /dev/null +++ b/ui/src/custom-tabs/dataset-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,82 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + DatasetCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ id, feastObjectQuery }: DatasetCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
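+    // Data shape note: useDemoQuery (below in this diff) parses the fetched
+    // JSON with zod, which requires a string `hello` and allows an optional
+    // string `name`. So the smallest demo.json that passes validation looks
+    // like this sketch (values are illustrative):
+    //
+    //   { "hello": "world" }
+    //
+    // Anything missing `hello`, or typing it as a non-string, makes
+    // demoSchema.parse throw and puts the query into its error state.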
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/dataset-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/dataset-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/dataset-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/entity-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/entity-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..6e4a87b570 --- /dev/null +++ b/ui/src/custom-tabs/entity-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,82 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + EntityCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ id, feastObjectQuery }: EntityCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
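+    // Fetch gating note: useDemoQuery passes `enabled: !!featureView` to
+    // react-query, so while the route param is still undefined no request
+    // is fired; the fetch starts automatically once `id` resolves to an
+    // actual entity name.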
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/entity-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/entity-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/entity-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/feature-service-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/feature-service-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..724c2504aa --- /dev/null +++ b/ui/src/custom-tabs/feature-service-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,85 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + FeatureServiceCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ + id, + feastObjectQuery, +}: FeatureServiceCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
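+    // Caching note: the query key is the string
+    // `demo-tab-namespace:${featureView}`, so returning to this tab for the
+    // same feature service serves the cached response instead of
+    // refetching, while each object name still gets its own cache entry.
+    // Using a tab-specific prefix keeps these entries from colliding with
+    // Feast UI's own registry queries.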
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/feature-service-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/feature-service-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/feature-service-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/ondemand-fv-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/ondemand-fv-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..dd6f3ab654 --- /dev/null +++ b/ui/src/custom-tabs/ondemand-fv-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,85 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + OnDemandFeatureViewCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ + id, + feastObjectQuery, +}: OnDemandFeatureViewCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/ondemand-fv-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/ondemand-fv-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/ondemand-fv-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx b/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx new file mode 100644 index 0000000000..2ce1b4e64b --- /dev/null +++ b/ui/src/custom-tabs/reguar-fv-demo-tab/DemoCustomTab.tsx @@ -0,0 +1,85 @@ +import React from "react"; + +import { + // Feature View Custom Tabs will get these props + RegularFeatureViewCustomTabProps, +} from "../types"; + +import { + EuiLoadingContent, + EuiEmptyPrompt, + EuiFlexGroup, + EuiFlexItem, + EuiCode, + EuiSpacer, +} from "@elastic/eui"; + +// Separating out the query is not required, +// but encouraged for code readability +import useDemoQuery from "./useDemoQuery"; + +const DemoCustomTab = ({ + id, + feastObjectQuery, +}: RegularFeatureViewCustomTabProps) => { + // Use React Query to fetch data + // that is custom to this tab. + // See: https://react-query.tanstack.com/guides/queries + const { isLoading, isError, isSuccess, data } = useDemoQuery({ + featureView: id, + }); + + if (isLoading) { + // Handle Loading State + // https://elastic.github.io/eui/#/display/loading + return ; + } + + if (isError) { + // Handle Data Fetching Error + // https://elastic.github.io/eui/#/display/empty-prompt + return ( + Unable to load your demo page} + body={ +

+          <p>
+            There was an error loading the Dashboard application. Contact your
+            administrator for help.
+          </p>
+        }
+      />
+    );
+  }
+
+  // Feast UI uses the Elastic UI component system.
+  // <EuiFlexGroup> and <EuiFlexItem> are particularly
+  // useful for layouts.
+  return (
+    <EuiFlexGroup>
+      <EuiFlexItem>
+        <p>
+          Hello World. The following is fetched data.
+        </p>
+        <EuiSpacer />
+        {isSuccess && data && (
+          <EuiCode>
+            <pre>{JSON.stringify(data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+      <EuiFlexItem>
+        <p>
+          ... and this is data from Feast UI’s own query.
+        </p>
+        <EuiSpacer />
+        {feastObjectQuery.isSuccess && feastObjectQuery.data && (
+          <EuiCode>
+            <pre>{JSON.stringify(feastObjectQuery.data, null, 2)}</pre>
+          </EuiCode>
+        )}
+      </EuiFlexItem>
+    </EuiFlexGroup>
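+    // Registration note: this demo tab is wired up in index.tsx later in
+    // this diff. Adding a second tab is one more entry in the same array;
+    // a sketch (the LineageTab component and its path are hypothetical):
+    //
+    //   RegularFeatureViewCustomTabs: [
+    //     { label: "Custom Tab Demo", path: "demo-tab", Component: RFVDemoCustomTab },
+    //     { label: "Lineage", path: "lineage-tab", Component: LineageTab },
+    //   ],
+    //
+    // Paths must be unique per object type: the tabs registry throws at
+    // startup if two tabs register the same subpath.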
+ ); +}; + +export default DemoCustomTab; diff --git a/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx b/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx new file mode 100644 index 0000000000..b93602dbe3 --- /dev/null +++ b/ui/src/custom-tabs/reguar-fv-demo-tab/useDemoQuery.tsx @@ -0,0 +1,44 @@ +import { useQuery } from "react-query"; +import { z } from "zod"; + +// Use Zod to check the shape of the +// json object being loaded +const demoSchema = z.object({ + hello: z.string(), + name: z.string().optional(), +}); + +// Make the type of the object available +type DemoDataType = z.infer; + +interface DemoQueryInterface { + featureView: string | undefined; +} + +const useDemoQuery = ({ featureView }: DemoQueryInterface) => { + // React Query manages caching for you based on query keys + // See: https://react-query.tanstack.com/guides/query-keys + const queryKey = `demo-tab-namespace:${featureView}`; + + // Pass the type to useQuery + // so that components consuming the + // result gets nice type hints + // on the other side. + return useQuery( + queryKey, + () => { + // Customizing the URL based on your needs + const url = `/demo-custom-tabs/demo.json`; + + return fetch(url) + .then((res) => res.json()) + .then((data) => demoSchema.parse(data)); // Use zod to parse results + }, + { + enabled: !!featureView, // Only start the query when the variable is not undefined + } + ); +}; + +export default useDemoQuery; +export type { DemoDataType }; diff --git a/ui/src/custom-tabs/types.ts b/ui/src/custom-tabs/types.ts new file mode 100644 index 0000000000..f80c56d0e2 --- /dev/null +++ b/ui/src/custom-tabs/types.ts @@ -0,0 +1,122 @@ +import { + useLoadOnDemandFeatureView, + useLoadRegularFeatureView, +} from "../pages/feature-views/useLoadFeatureView"; +import useLoadFeatureService from "../pages/feature-services/useLoadFeatureService"; +import useLoadDataSource from "../pages/data-sources/useLoadDataSource"; +import useLoadEntity from "../pages/entities/useLoadEntity"; +import useLoadDataset from "../pages/saved-data-sets/useLoadDataset"; + +interface CustomTabRegistrationInterface { + label: string; + path: string; + Component: (...args: any[]) => JSX.Element; +} + +// Type for Regular Feature View Custom Tabs +type RegularFeatureViewQueryReturnType = ReturnType< + typeof useLoadRegularFeatureView +>; +interface RegularFeatureViewCustomTabProps { + id: string | undefined; + feastObjectQuery: RegularFeatureViewQueryReturnType; +} +interface RegularFeatureViewCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: RegularFeatureViewCustomTabProps) => JSX.Element; +} + +// Type for OnDemand Feature View Custom Tabs +type OnDemandFeatureViewQueryReturnType = ReturnType< + typeof useLoadOnDemandFeatureView +>; +interface OnDemandFeatureViewCustomTabProps { + id: string | undefined; + feastObjectQuery: OnDemandFeatureViewQueryReturnType; +} +interface OnDemandFeatureViewCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: OnDemandFeatureViewCustomTabProps) => JSX.Element; +} + +// Type for Feature Service Custom Tabs +interface EntityCustomTabProps { + id: string | undefined; + feastObjectQuery: ReturnType; +} +interface EntityCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: EntityCustomTabProps) => JSX.Element; +} + +// Type for Feature Service Custom Tabs 
+interface FeatureServiceCustomTabProps { + id: string | undefined; + feastObjectQuery: ReturnType; +} +interface FeatureServiceCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: FeatureServiceCustomTabProps) => JSX.Element; +} + +// Type for Data Source Custom Tabs +interface DataSourceCustomTabProps { + id: string | undefined; + feastObjectQuery: ReturnType; +} +interface DataSourceCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: DataSourceCustomTabProps) => JSX.Element; +} + +// Type for Data Source Custom Tabs +interface DatasetCustomTabProps { + id: string | undefined; + feastObjectQuery: ReturnType; +} +interface DatasetCustomTabRegistrationInterface + extends CustomTabRegistrationInterface { + Component: ({ + id, + feastObjectQuery, + ...args + }: DatasetCustomTabProps) => JSX.Element; +} + +export type { + CustomTabRegistrationInterface, + RegularFeatureViewQueryReturnType, + RegularFeatureViewCustomTabRegistrationInterface, + RegularFeatureViewCustomTabProps, + OnDemandFeatureViewQueryReturnType, + OnDemandFeatureViewCustomTabProps, + OnDemandFeatureViewCustomTabRegistrationInterface, + FeatureServiceCustomTabRegistrationInterface, + FeatureServiceCustomTabProps, + DataSourceCustomTabRegistrationInterface, + DataSourceCustomTabProps, + EntityCustomTabRegistrationInterface, + EntityCustomTabProps, + DatasetCustomTabRegistrationInterface, + DatasetCustomTabProps, +}; diff --git a/ui/src/graphics/DataSourceIcon.tsx b/ui/src/graphics/DataSourceIcon.tsx new file mode 100644 index 0000000000..fdd92b8773 --- /dev/null +++ b/ui/src/graphics/DataSourceIcon.tsx @@ -0,0 +1,43 @@ +import React from "react"; + +const DataSourceIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + + ); +}; + +const DataSourceIcon16 = () => { + return ; +}; + +const DataSourceIcon32 = () => { + return ( + + ); +}; + +export { DataSourceIcon, DataSourceIcon16, DataSourceIcon32 }; diff --git a/ui/src/graphics/DatasetIcon.tsx b/ui/src/graphics/DatasetIcon.tsx new file mode 100644 index 0000000000..5c28f76f1d --- /dev/null +++ b/ui/src/graphics/DatasetIcon.tsx @@ -0,0 +1,52 @@ +import React from "react"; + +const DatasetIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + + + + + + + ); +}; + +const DatasetIcon16 = () => { + return ; +}; + +const DatasetIcon32 = () => { + return ( + + ); +}; + +export { DatasetIcon, DatasetIcon16, DatasetIcon32 }; diff --git a/ui/src/graphics/EntityIcon.tsx b/ui/src/graphics/EntityIcon.tsx new file mode 100644 index 0000000000..d9daf542a6 --- /dev/null +++ b/ui/src/graphics/EntityIcon.tsx @@ -0,0 +1,46 @@ +import React from "react"; + +const EntityIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + + ); +}; + +const EntityIcon16 = () => { + return ; +}; + +const EntityIcon32 = () => { + return ( + + ); +}; + +export { EntityIcon, EntityIcon16, EntityIcon32 }; diff --git a/ui/src/graphics/FeastIconBlue.tsx b/ui/src/graphics/FeastIconBlue.tsx new file mode 100644 index 0000000000..7bd99226eb --- /dev/null +++ b/ui/src/graphics/FeastIconBlue.tsx @@ -0,0 +1,18 @@ +import React from "react"; + +const FeastIconBlue = () => { + return ( + + + + + ); +}; + +export default FeastIconBlue; diff --git a/ui/src/graphics/FeastWordMark.tsx b/ui/src/graphics/FeastWordMark.tsx new file mode 
100644 index 0000000000..576e416bd6 --- /dev/null +++ b/ui/src/graphics/FeastWordMark.tsx @@ -0,0 +1,26 @@ +import React from "react"; + +const FeastWordMark = () => { + return ( + + + + + + + ); +}; + +export default FeastWordMark; diff --git a/ui/src/graphics/FeatureServiceIcon.tsx b/ui/src/graphics/FeatureServiceIcon.tsx new file mode 100644 index 0000000000..04c4de9cd6 --- /dev/null +++ b/ui/src/graphics/FeatureServiceIcon.tsx @@ -0,0 +1,42 @@ +import React from "react"; + +const FeatureServiceIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + ); +}; + +const FeatureServiceIcon16 = () => { + return ( + + ); +}; + +const FeatureServiceIcon32 = () => { + return ( + + ); +}; + +export { FeatureServiceIcon, FeatureServiceIcon16, FeatureServiceIcon32 }; diff --git a/ui/src/graphics/FeatureViewIcon.tsx b/ui/src/graphics/FeatureViewIcon.tsx new file mode 100644 index 0000000000..f536953b11 --- /dev/null +++ b/ui/src/graphics/FeatureViewIcon.tsx @@ -0,0 +1,54 @@ +import React from "react"; + +const FeatureViewIcon = ({ + size, + className, +}: { + size: number; + className?: string; +}) => { + return ( + + + + + + ); +}; + +const FeatureViewIcon16 = () => { + return ; +}; + +const FeatureViewIcon32 = () => { + return ( + + ); +}; + +export { FeatureViewIcon, FeatureViewIcon16, FeatureViewIcon32 }; diff --git a/ui/src/hacks/RouteAdapter.ts b/ui/src/hacks/RouteAdapter.ts new file mode 100644 index 0000000000..e7743c9d90 --- /dev/null +++ b/ui/src/hacks/RouteAdapter.ts @@ -0,0 +1,39 @@ +import React from "react"; +import { Location } from "history"; +import { + useLocation, + useNavigate, + Location as RouterLocation, +} from "react-router-dom"; + +// via: https://github.com/pbeshai/use-query-params/issues/196#issuecomment-996893750 +interface RouteAdapterProps { + children: React.FunctionComponent<{ + history: { + replace(location: Location): void; + push(location: Location): void; + }; + location: RouterLocation; + }>; +} + +// Via: https://github.com/pbeshai/use-query-params/blob/cd44e7fb3394620f757bfb09ff57b7f296d9a5e6/examples/react-router-6/src/index.js#L36 +const RouteAdapter = ({ children }: RouteAdapterProps) => { + const navigate = useNavigate(); + const location = useLocation(); + + const adaptedHistory = React.useMemo( + () => ({ + replace(location: Location) { + navigate(location, { replace: true, state: location.state }); + }, + push(location: Location) { + navigate(location, { replace: false, state: location.state }); + }, + }), + [navigate] + ); + return children && children({ history: adaptedHistory, location }); +}; + +export default RouteAdapter; diff --git a/ui/src/hooks/encodeSearchQueryString.ts b/ui/src/hooks/encodeSearchQueryString.ts new file mode 100644 index 0000000000..547db30d37 --- /dev/null +++ b/ui/src/hooks/encodeSearchQueryString.ts @@ -0,0 +1,18 @@ +import { StringParam } from "use-query-params"; +import { encodeQueryParams } from "serialize-query-params"; +import { stringify } from "query-string"; + +const encodeSearchQueryString = (query: string) => { + return stringify( + encodeQueryParams( + { + tags: StringParam, + }, + { + tags: query, + } + ) + ); +}; + +export { encodeSearchQueryString }; diff --git a/ui/src/hooks/useDocumentTitle.ts b/ui/src/hooks/useDocumentTitle.ts new file mode 100644 index 0000000000..42fde3be11 --- /dev/null +++ b/ui/src/hooks/useDocumentTitle.ts @@ -0,0 +1,18 @@ +import { useEffect, useState } from "react"; + +const useDocumentTitle = (title: string) => { + const [document_title, 
set] = useState(title); + useEffect(() => { + document.title = document_title; + }, [document_title]); + + const setDoucmentTitle = (newTitle: string) => { + if (document_title !== newTitle) { + set(newTitle); + } + }; + + return setDoucmentTitle; +}; + +export { useDocumentTitle }; diff --git a/ui/src/hooks/useFCOExploreSuggestions.ts b/ui/src/hooks/useFCOExploreSuggestions.ts new file mode 100644 index 0000000000..5767f73bed --- /dev/null +++ b/ui/src/hooks/useFCOExploreSuggestions.ts @@ -0,0 +1,133 @@ +import { encodeSearchQueryString } from "./encodeSearchQueryString"; +import { FEAST_FCO_TYPES } from "../parsers/types"; + +import { FeastFeatureViewType } from "../parsers/feastFeatureViews"; +import { useParams } from "react-router-dom"; +import { useFeatureViewTagsAggregation } from "./useTagsAggregation"; + +interface ExplorationSuggestionItem { + name: string; + link: string; + label: string; + count: number; +} + +interface ExplorationSuggestion { + title: string; + items: ExplorationSuggestionItem[]; +} + +const FCO_TO_URL_NAME_MAP: Record = { + dataSource: "/data-source", + entity: "/entity", + featureView: "/feature-view", + featureService: "/feature-service", +}; + +const createSearchLink = ( + FCOType: FEAST_FCO_TYPES, + key: string, + value: string +) => { + const URL = FCO_TO_URL_NAME_MAP[FCOType]; + + return URL + "?" + encodeSearchQueryString(`${key}:${value}`); +}; + +const NUMBER_OF_SUGGESTION_GROUPS = 2; +const NUMBER_OF_VALUES_PER_GROUP = 4; + +const sortTagByUniqueValues = ( + tagAggregation: Record> +) => { + return Object.entries(tagAggregation).sort( + ([a, valuesOfA], [b, valuesOfB]) => { + return Object.keys(valuesOfB).length - Object.keys(valuesOfA).length; + } + ); +}; + +const sortTagsByTotalUsage = ( + tagAggregation: Record> +) => { + return Object.entries(tagAggregation).sort( + ([a, valuesOfA], [b, valuesOfB]) => { + const countOfA = Object.values(valuesOfA).reduce((memo, current) => { + return memo + current.length; + }, 0); + + const countOfB = Object.values(valuesOfB).reduce((memo, current) => { + return memo + current.length; + }, 0); + + return countOfB - countOfA; + } + ); +}; + +const generateExplorationSuggestions = ( + tagAggregation: Record>, + projectName: string +) => { + const suggestions: ExplorationSuggestion[] = []; + + if (tagAggregation) { + const SortedCandidates = + sortTagByUniqueValues(tagAggregation); + + SortedCandidates.slice(0, NUMBER_OF_SUGGESTION_GROUPS).forEach( + ([selectedTag, selectedTagValuesMap]) => { + suggestions.push({ + title: `Feature Views by "${selectedTag}"`, + items: Object.entries(selectedTagValuesMap) + .sort(([a, entriesOfA], [b, entriesOfB]) => { + return entriesOfB.length - entriesOfA.length; + }) + .slice(0, NUMBER_OF_VALUES_PER_GROUP) + .map(([tagValue, fvEntries]) => { + return { + name: tagValue, + link: + `/p/${projectName}` + + createSearchLink( + FEAST_FCO_TYPES["featureView"], + selectedTag, + tagValue + ), + label: `Feature Services where ${selectedTag} is '${tagValue}'`, + count: fvEntries.length, + }; + }), + }); + } + ); + } + + return suggestions; +}; + +const useFCOExploreSuggestions = () => { + const query = useFeatureViewTagsAggregation(); + const tagAggregation = query.data; + + const { projectName } = useParams(); + + let data: ExplorationSuggestion[] | undefined = undefined; + + if (query.isSuccess && tagAggregation && projectName) { + data = generateExplorationSuggestions(tagAggregation, projectName); + } + + return { + ...query, + data, + }; +}; + +export default useFCOExploreSuggestions; 
+export { + generateExplorationSuggestions, + sortTagByUniqueValues, + sortTagsByTotalUsage, +}; +export type { ExplorationSuggestion }; diff --git a/ui/src/hooks/useMatchSubpath.ts b/ui/src/hooks/useMatchSubpath.ts new file mode 100644 index 0000000000..4aabf53bc8 --- /dev/null +++ b/ui/src/hooks/useMatchSubpath.ts @@ -0,0 +1,15 @@ +import { useResolvedPath, useMatch } from "react-router-dom"; + +const useMatchSubpath = (to: string) => { + const resolved = useResolvedPath(to); + + return useMatch({ path: resolved.pathname, end: false }) !== null; +}; + +const useMatchExact = (to: string) => { + const resolved = useResolvedPath(to); + + return useMatch({ path: resolved.pathname, end: true }) !== null; +}; + +export { useMatchSubpath, useMatchExact }; diff --git a/ui/src/hooks/useSearchInputWithTags.ts b/ui/src/hooks/useSearchInputWithTags.ts new file mode 100644 index 0000000000..6683e65e00 --- /dev/null +++ b/ui/src/hooks/useSearchInputWithTags.ts @@ -0,0 +1,305 @@ +import { useState } from "react"; + +import { useQueryParam, StringParam } from "use-query-params"; + +import { sortTagsByTotalUsage } from "./useFCOExploreSuggestions"; + +type tagTokenType = Record<"key" | "value", string>; +type tagTokenGroupsType = Record; +type tagAggregationRecordType = + | Record> + | undefined; + +type SuggestionModes = "KEY" | "VALUE"; + +interface filterInputInterface { + tagTokenGroups: tagTokenGroupsType; + searchTokens: string[]; +} + +interface TagSuggestionInstance { + suggestion: string; + description: string; +} + +const generateEmptyStateSuggestions = ( + tagsAggregationData: tagAggregationRecordType +) => { + if (tagsAggregationData) { + return sortTagsByTotalUsage(tagsAggregationData).map( + ([tagKey, recordOfTagValues]) => { + const uniqueValues = Object.keys(recordOfTagValues).length; + const totalEntries = Object.values(recordOfTagValues).reduce( + (memo, current) => { + return memo + current.length; + }, + 0 + ); + + return { + suggestion: tagKey, + description: `${uniqueValues} unique tag values. 
${totalEntries} total entries.`, + }; + } + ); + } else { + return []; + } +}; + +const generateTagKeySuggestions = ( + input: string, + tagsAggregationData: tagAggregationRecordType +) => { + if (tagsAggregationData) { + return Object.entries(tagsAggregationData) + .filter(([potentialTagKey, summary]) => { + return potentialTagKey.indexOf(input) >= 0; + }) + .map(([potentialTagKey, summary]) => { + const tagValueVariants = Object.entries(summary); + + return { + suggestion: potentialTagKey, + description: `${tagValueVariants.length} different tag values`, + }; + }); + } else { + return []; + } +}; + +const generateTagValueSuggestions = ( + input: string, + tagsAggregationData: tagAggregationRecordType +) => { + if (tagsAggregationData) { + const [currentTagKey, currentTagValue] = input.split(":"); + const entriesWithTagKey = tagsAggregationData[currentTagKey]; + + const summarizeCallback = (entry: unknown[]) => { + const potentialTagKey = entry[0] as string; + const summary = entry[1] as unknown[]; + + return { + suggestion: potentialTagKey, + description: `${summary.length} entries`, + }; + }; + + if (entriesWithTagKey) { + if (currentTagValue && currentTagValue.length > 0) { + return Object.entries(entriesWithTagKey) + .filter(([potentialTagValue, entries]) => { + return ( + potentialTagValue.indexOf(currentTagValue) >= 0 // && + // potentialTagValue !== currentTagValue // Don't show exact matches, b/c that means we probably already selected it + ); + }) + .map(summarizeCallback); + } else { + return Object.entries(entriesWithTagKey).map(summarizeCallback); + } + } else { + return []; + } + } else { + return []; + } +}; + +function getAllSpacePositions(s: string) { + const indices: number[] = []; + while ( + s.indexOf(" ", indices.length ? indices[indices.length - 1] + 1 : 0) !== + -1 && + indices.length < 100 + ) { + const position = indices[indices.length - 1] || 0; + const index = s.indexOf(" ", position + 1); + indices.push(index); + } + return indices; +} + +interface TagSplitterReturnInterface { + chunks: string[]; + tokenInFocusIndex: number; + currentTag: string; +} + +const parseTokenInput = ( + cursorPosition: number | undefined, + tagsString: string +): TagSplitterReturnInterface => { + // Get where the spaces in the tagString, plus a start and end value + // e.g. "A:a B:b" would return + // [0, 3, 7] + const chunks = tagsString.split(" "); + + const allSpacePositions = [0] + .concat(getAllSpacePositions(tagsString)) + .concat(tagsString.length + 1); + + let tokenInFocusIndex = 0; + if (cursorPosition) { + tokenInFocusIndex = allSpacePositions.findIndex((value, index) => { + return ( + cursorPosition >= value && + cursorPosition <= allSpacePositions[index + 1] + ); + }); + } + + const currentTag = chunks[tokenInFocusIndex] || ""; + + return { + currentTag, + chunks, + tokenInFocusIndex, + }; +}; + +const useSearchQuery = () => { + const [query, setQuery] = useQueryParam("q", StringParam); + const searchString = query || ""; + + const searchTokens = searchString.split(" ").filter((t) => t.length >= 3); + + const setSearchString = (d: string) => { + setQuery(d); + }; + + return { + searchString, + searchTokens, + setSearchString, + }; +}; + +const useTagsWithSuggestions = ( + tagsAggregationData: tagAggregationRecordType +) => { + const [rawtagsString, setTagsStringParam] = useQueryParam( + "tags", + StringParam + ); + + const tagsString = rawtagsString || ""; + + // Spaces in the beginning of the string + // really messes with parseTokenInput(). Just prevent it. 
+ const setTagsString = (s: string) => { + setTagsStringParam(s.trimStart()); + }; + + const [cursorPosition, setCursor] = useState(undefined); + const setCursorPosition = (position: number | undefined) => { + setCursor(position); + }; + + // Parse input into tokens, and detect which token + // we are focused on given the current cursor position + const { chunks, tokenInFocusIndex, currentTag } = parseTokenInput( + cursorPosition, + tagsString + ); + + const suggestionMode: SuggestionModes = + currentTag.indexOf(":") < 0 ? "KEY" : "VALUE"; + + let tagSuggestions: TagSuggestionInstance[] = []; + if (tagsAggregationData) { + if (currentTag.length > 0) { + if (suggestionMode === "KEY") { + tagSuggestions = generateTagKeySuggestions( + currentTag, + tagsAggregationData + ); + } else { + tagSuggestions = generateTagValueSuggestions( + currentTag, + tagsAggregationData + ); + } + } else { + // Current Tag is empty + tagSuggestions = generateEmptyStateSuggestions(tagsAggregationData); + } + } + + // Helper method for accepting suggestions + const setSuggestionAtPositionInTagsString = ( + suggestion: string, + position: number + ) => { + const nextTagsTokens = chunks.slice(0); + nextTagsTokens[position] = suggestion; + + setTagsString(nextTagsTokens.join(" ")); + }; + + const acceptSuggestion = (suggestion: TagSuggestionInstance) => { + if (suggestionMode === "KEY") { + const newKeyText = suggestion.suggestion + ":"; + + setSuggestionAtPositionInTagsString(newKeyText, tokenInFocusIndex); + } else { + const [currentTagKey] = currentTag.split(":"); + + const newTagText = `${currentTagKey}:` + suggestion.suggestion; + + setSuggestionAtPositionInTagsString(newTagText, tokenInFocusIndex); + } + }; + + const tagKeysSet = new Set(); + + const tagTokens: tagTokenType[] = chunks + .filter((chunk: string) => { + return chunk.indexOf(":") > 0; + }) + .map((chunk) => { + const parts = chunk.split(":"); + tagKeysSet.add(parts[0]); + + return { + key: parts[0], + value: parts[1], + }; + }); + + const tagTokenGroups = tagTokens.reduce( + (memo: Record, current) => { + if (memo[current.key]) { + memo[current.key].push(current.value); + } else { + memo[current.key] = [current.value]; + } + + return memo; + }, + {} + ); + + return { + setCursorPosition, + currentTag, + tagsString, + setTagsString, + tagTokens, + tagTokenGroups, + tagKeysSet, // Used to determine which columns to add to search results, + suggestionMode, + tagSuggestions, + acceptSuggestion, + }; +}; + +export { useTagsWithSuggestions, useSearchQuery }; +export type { + filterInputInterface, + tagTokenGroupsType, + TagSuggestionInstance, + SuggestionModes, +}; diff --git a/ui/src/hooks/useTagsAggregation.ts b/ui/src/hooks/useTagsAggregation.ts new file mode 100644 index 0000000000..21480fa8b0 --- /dev/null +++ b/ui/src/hooks/useTagsAggregation.ts @@ -0,0 +1,85 @@ +import { useContext, useMemo } from "react"; +import RegistryPathContext from "../contexts/RegistryPathContext"; +import { FeastFeatureServiceType } from "../parsers/feastFeatureServices"; +import { FeastFeatureViewType } from "../parsers/feastFeatureViews"; +import useLoadRegistry from "../queries/useLoadRegistry"; + +// Usage of generic type parameter T +// https://stackoverflow.com/questions/53203409/how-to-tell-typescript-that-im-returning-an-array-of-arrays-of-the-input-type +const buildTagCollection = ( + array: T[], + recordExtractor: (unknownFCO: T) => Record | undefined // Assumes that tags are always a Record +): Record> => { + const tagCollection = array.reduce( + (memo: Record>, 
fco: T) => { + const tags = recordExtractor(fco); + + if (tags) { + Object.entries(tags).forEach(([tagKey, tagValue]) => { + if (!memo[tagKey]) { + memo[tagKey] = { + [tagValue]: [fco], + }; + } else { + if (!memo[tagKey][tagValue]) { + memo[tagKey][tagValue] = [fco]; + } else { + memo[tagKey][tagValue].push(fco); + } + } + }); + } + + return memo; + }, + {} + ); + + return tagCollection; +}; + +const useFeatureViewTagsAggregation = () => { + const registryUrl = useContext(RegistryPathContext); + const query = useLoadRegistry(registryUrl); + + const data = useMemo(() => { + return query.data && query.data.objects && query.data.objects.featureViews + ? buildTagCollection( + query.data.objects.featureViews, + (fv) => { + return fv.spec.tags; + } + ) + : undefined; + }, [query.data]); + + return { + ...query, + data, + }; +}; + +const useFeatureServiceTagsAggregation = () => { + const registryUrl = useContext(RegistryPathContext); + const query = useLoadRegistry(registryUrl); + + const data = useMemo(() => { + return query.data && + query.data.objects && + query.data.objects.featureServices + ? buildTagCollection( + query.data.objects.featureServices, + (fs) => { + return fs.spec.tags; + } + ) + : undefined; + }, [query.data]); + + return { + ...query, + data, + }; +}; + +export { useFeatureViewTagsAggregation, useFeatureServiceTagsAggregation }; diff --git a/ui/src/index.css b/ui/src/index.css new file mode 100644 index 0000000000..46ca3ba295 --- /dev/null +++ b/ui/src/index.css @@ -0,0 +1,25 @@ +@supports (font-variation-settings: normal) { + html { + font-family: "Inter var", "system-ui"; + } +} + +html { + background: url("assets/feast-icon-grey.svg") no-repeat -6vh 56vh; + background-size: 50vh; + background-attachment: fixed; +} + +body { + margin: 0; + font-family: "Inter", -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", + "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", + monospace; +} diff --git a/ui/src/index.tsx b/ui/src/index.tsx new file mode 100644 index 0000000000..d4f1013503 --- /dev/null +++ b/ui/src/index.tsx @@ -0,0 +1,82 @@ +import React from "react"; +import ReactDOM from "react-dom"; +import { QueryClient } from "react-query"; +import FeastUI from "./FeastUI"; + +// How to add a Custom Tab +// 1. Pick which object type you want your tab +// to be in. e.g. Feature View, Feature Service, etc. +// +// 2. Write a regular React Component for Tab Content. +// It will be passed props with data about the Feast FCO +// e.g. RegularFeatureViewCustomTabProps, FeatureServiceCustomTabProps +// See: types.ts in this folder +// +// 3. Register the tab in the appropriate array below. Each entry +// is a record with three keys: label, path, and Component. 
+// Import your component and pass it as Component +import RFVDemoCustomTab from "./custom-tabs/reguar-fv-demo-tab/DemoCustomTab"; +import ODFVDemoCustomTab from "./custom-tabs/ondemand-fv-demo-tab/DemoCustomTab"; +import FSDemoCustomTab from "./custom-tabs/feature-service-demo-tab/DemoCustomTab"; +import DSDemoCustomTab from "./custom-tabs/data-source-demo-tab/DemoCustomTab"; +import EntDemoCustomTab from "./custom-tabs/entity-demo-tab/DemoCustomTab"; +import DatasetDemoCustomTab from "./custom-tabs/dataset-demo-tab/DemoCustomTab"; + +const queryClient = new QueryClient(); + +const tabsRegistry = { + RegularFeatureViewCustomTabs: [ + { + label: "Custom Tab Demo", // Navigation Label for the tab + path: "demo-tab", // Subpath for the tab + Component: RFVDemoCustomTab, + }, + ], + OnDemandFeatureViewCustomTabs: [ + { + label: "Custom Tab Demo", + path: "demo-tab", + Component: ODFVDemoCustomTab, + }, + ], + FeatureServiceCustomTabs: [ + { + label: "Custom Tab Demo", + path: "fs-demo-tab", + Component: FSDemoCustomTab, + }, + ], + DataSourceCustomTabs: [ + { + label: "Custom Tab Demo", + path: "fs-demo-tab", + Component: DSDemoCustomTab, + }, + ], + EntityCustomTabs: [ + { + label: "Custom Tab Demo", + path: "demo-tab", + Component: EntDemoCustomTab, + }, + ], + DatasetCustomTabs: [ + { + label: "Custom Tab Demo", + path: "demo-tab", + Component: DatasetDemoCustomTab, + }, + ], +}; + +ReactDOM.render( + + + , + document.getElementById("root") +); diff --git a/ui/src/mocks/handlers.ts b/ui/src/mocks/handlers.ts new file mode 100644 index 0000000000..e7b0040f0d --- /dev/null +++ b/ui/src/mocks/handlers.ts @@ -0,0 +1,29 @@ +import { rest } from "msw"; +import registry from "../../public/registry.json"; + +const projectsListWithDefaultProject = rest.get( + "/projects-list.json", + (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + default: "credit_score_project", + projects: [ + { + name: "Credit Score Project", + description: + "Project for credit scoring team and associated models.", + id: "credit_score_project", + registryPath: "/registry.json", + }, + ], + }) + ); + } +); + +const creditHistoryRegistry = rest.get("/registry.json", (req, res, ctx) => { + return res(ctx.status(200), ctx.json(registry)); +}); + +export { projectsListWithDefaultProject, creditHistoryRegistry }; diff --git a/ui/src/pages/Layout.tsx b/ui/src/pages/Layout.tsx new file mode 100644 index 0000000000..ff56414f35 --- /dev/null +++ b/ui/src/pages/Layout.tsx @@ -0,0 +1,65 @@ +import React from "react"; + +import { + EuiPage, + EuiPageSideBar, + EuiPageBody, + EuiErrorBoundary, + EuiHorizontalRule, + EuiSpacer, +} from "@elastic/eui"; +import { Outlet } from "react-router-dom"; + +import RegistryPathContext from "../contexts/RegistryPathContext"; +import { useParams } from "react-router-dom"; +import { useLoadProjectsList } from "../contexts/ProjectListContext"; + +import ProjectSelector from "../components/ProjectSelector"; +import Sidebar from "./Sidebar"; +import FeastWordMark from "../graphics/FeastWordMark"; + +const Layout = () => { + // Registry Path Context has to be inside Layout + // because it has to be under routes + // in order to use useParams + let { projectName } = useParams(); + + const { data } = useLoadProjectsList(); + + const currentProject = data?.projects.find((project) => { + return project.id === projectName; + }); + + const registryPath = currentProject?.registryPath || ""; + + return ( + + + + + + + {registryPath && ( + + + + + )} + + + + + + + + + + ); +}; + +export default 
Layout; diff --git a/ui/src/pages/NoMatch.tsx b/ui/src/pages/NoMatch.tsx new file mode 100644 index 0000000000..6e2461ee61 --- /dev/null +++ b/ui/src/pages/NoMatch.tsx @@ -0,0 +1,7 @@ +import React from "react"; + +const NoMatch = () => { + return
<div>404</div>
; +}; + +export default NoMatch; diff --git a/ui/src/pages/ProjectOverviewPage.tsx b/ui/src/pages/ProjectOverviewPage.tsx new file mode 100644 index 0000000000..854af49375 --- /dev/null +++ b/ui/src/pages/ProjectOverviewPage.tsx @@ -0,0 +1,99 @@ +import React, { useContext } from "react"; + +import { + EuiPageContent, + EuiPageContentBody, + EuiText, + EuiFlexGroup, + EuiFlexItem, + EuiTitle, + EuiSpacer, + EuiLoadingContent, + EuiEmptyPrompt, +} from "@elastic/eui"; + +import { useDocumentTitle } from "../hooks/useDocumentTitle"; +import ObjectsCountStats from "../components/ObjectsCountStats"; +import ExplorePanel from "../components/ExplorePanel"; +import useLoadRegistry from "../queries/useLoadRegistry"; +import RegistryPathContext from "../contexts/RegistryPathContext"; + +const ProjectOverviewPage = () => { + useDocumentTitle("Feast Home"); + const registryUrl = useContext(RegistryPathContext); + const { isLoading, isSuccess, isError, data } = useLoadRegistry(registryUrl); + + return ( + + + +

+          <h1>
+            {isLoading && <EuiLoadingContent />}
+            {isSuccess && data?.project && `Project: ${data.project}`}
+          </h1>
+        </EuiTitle>
+        <EuiSpacer />
+        <ObjectsCountStats />
+        <EuiSpacer />
+        <EuiFlexGroup>
+          <EuiFlexItem grow={2}>
+            {isLoading && <EuiLoadingContent />}
+            {isError && (
+              <EuiEmptyPrompt
+                title={<h2>Error Loading Project Configs</h2>}
+                body={
+                  <p>
+                    There was an error loading the Project Configurations.
+                    Please check that the <code>feature_store.yaml</code> file
+                    is available and well-formed.
+                  </p>
+                }
+              />
+            )}
+            {isSuccess &&
+              (data?.description ? (
+                <EuiText>
+                  <p>{data.description}</p>
+                </EuiText>
+              ) : (
+                <EuiText>
+                  <p>
+                    Welcome to your new Feast project. In this UI, you can see
+                    Data Sources, Entities, Feature Views and Feature Services
+                    registered in Feast.
+                  </p>
+                  <p>
+                    It looks like this project already has some objects
+                    registered. If you are new to this project, we suggest
+                    starting by exploring the Feature Services, as they
+                    represent the collection of Feature Views serving a
+                    particular model.
+                  </p>
+                  <p>
+                    <strong>Note</strong>: We encourage you to replace this
+                    welcome message with more suitable content for your team.
+                    You can do so by specifying a{" "}
+                    <code>project_description</code> in your{" "}
+                    <code>feature_store.yaml</code> file.
+                  </p>
+                </EuiText>
+              ))}
+          </EuiFlexItem>
+          <EuiFlexItem grow={1}>
+            <ExplorePanel />
+          </EuiFlexItem>
+        </EuiFlexGroup>
+      </EuiPageContentBody>
+    </EuiPageContent>
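+    // The fallback welcome text above renders only while no description is
+    // set. Supplying one is a single key in feature_store.yaml, e.g. (the
+    // value here is illustrative):
+    //
+    //   project_description: "Features for the credit scoring models."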
+ ); +}; + +export default ProjectOverviewPage; diff --git a/ui/src/pages/RootProjectSelectionPage.tsx b/ui/src/pages/RootProjectSelectionPage.tsx new file mode 100644 index 0000000000..424e93c85d --- /dev/null +++ b/ui/src/pages/RootProjectSelectionPage.tsx @@ -0,0 +1,76 @@ +import React, { useEffect } from "react"; +import { + EuiCard, + EuiFlexGrid, + EuiFlexItem, + EuiIcon, + EuiLoadingContent, + EuiPageContent, + EuiPageContentBody, + EuiText, + EuiTitle, + EuiHorizontalRule, +} from "@elastic/eui"; +import { useLoadProjectsList } from "../contexts/ProjectListContext"; +import { useNavigate } from "react-router-dom"; +import FeastIconBlue from "../graphics/FeastIconBlue"; + +const RootProjectSelectionPage = () => { + const { isLoading, isSuccess, data } = useLoadProjectsList(); + const navigate = useNavigate(); + + useEffect(() => { + if (data && data.default) { + // If a default is set, redirect there. + navigate(`/p/${data.default}`); + } + + if (data && data.projects.length === 1) { + // If there is only one project, redirect there. + navigate(`/p/${data.projects[0].id}`); + } + }, [data, navigate]); + + const projectCards = data?.projects.map((item, index) => { + return ( + + } + title={`${item.name}`} + description={item?.description || ""} + onClick={() => { + navigate(`/p/${item.id}`); + }} + /> + + ); + }); + + return ( + + + +

+        <h1>Welcome to Feast</h1>
+      </EuiTitle>
+      <EuiSpacer />
+      <EuiText>
+        <p>Select one of the projects.</p>
+      </EuiText>
+      <EuiHorizontalRule />
+      {isLoading && <EuiLoadingContent />}
+      {isSuccess && data?.projects && (
+        <EuiFlexGrid columns={3}>
+          {projectCards}
+        </EuiFlexGrid>
+      )}
+    </EuiPageContentBody>
+  </EuiPageContent>
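+    // The project cards above are built from projects-list.json; the msw
+    // mock in src/mocks/handlers.ts (later in this diff) shows the expected
+    // shape:
+    //
+    //   {
+    //     "default": "credit_score_project",
+    //     "projects": [
+    //       {
+    //         "name": "Credit Score Project",
+    //         "description": "Project for credit scoring team and associated models.",
+    //         "id": "credit_score_project",
+    //         "registryPath": "/registry.json"
+    //       }
+    //     ]
+    //   }
+    //
+    // When `default` is set, or when only one project exists, the useEffect
+    // above redirects straight to that project.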
+ ); +}; + +export default RootProjectSelectionPage; diff --git a/ui/src/pages/Sidebar.tsx b/ui/src/pages/Sidebar.tsx new file mode 100644 index 0000000000..9fc1a532f2 --- /dev/null +++ b/ui/src/pages/Sidebar.tsx @@ -0,0 +1,127 @@ +import React, { useContext, useState } from "react"; + +import { EuiIcon, EuiSideNav, htmlIdGenerator } from "@elastic/eui"; +import { useNavigate, useParams } from "react-router-dom"; +import { useMatchSubpath } from "../hooks/useMatchSubpath"; +import useLoadRegistry from "../queries/useLoadRegistry"; +import RegistryPathContext from "../contexts/RegistryPathContext"; + +import { DataSourceIcon16 } from "../graphics/DataSourceIcon"; +import { EntityIcon16 } from "../graphics/EntityIcon"; +import { FeatureViewIcon16 } from "../graphics/FeatureViewIcon"; +import { FeatureServiceIcon16 } from "../graphics/FeatureServiceIcon"; +import { DatasetIcon16 } from "../graphics/DatasetIcon"; + +const SideNav = () => { + const registryUrl = useContext(RegistryPathContext); + const { isSuccess, data } = useLoadRegistry(registryUrl); + const { projectName } = useParams(); + + const [isSideNavOpenOnMobile, setisSideNavOpenOnMobile] = useState(false); + + const navigate = useNavigate(); + + const toggleOpenOnMobile = () => { + setisSideNavOpenOnMobile(!isSideNavOpenOnMobile); + }; + + const dataSourcesLabel = `Data Sources ${ + isSuccess && data?.objects.dataSources + ? `(${data?.objects.dataSources?.length})` + : "" + }`; + + const entitiesLabel = `Entities ${ + isSuccess && data?.objects.entities + ? `(${data?.objects.entities?.length})` + : "" + }`; + + const featureViewsLabel = `Feature Views ${ + isSuccess && data?.mergedFVList && data?.mergedFVList.length > 0 + ? `(${data?.mergedFVList.length})` + : "" + }`; + + const featureServicesLabel = `Feature Services ${ + isSuccess && data?.objects.featureServices + ? `(${data?.objects.featureServices?.length})` + : "" + }`; + + const savedDatasetsLabel = `Datasets ${ + isSuccess && data?.objects.savedDatasets + ? 
`(${data?.objects.savedDatasets?.length})` + : "" + }`; + + const sideNav = [ + { + name: "Home", + id: htmlIdGenerator("basicExample")(), + onClick: () => { + navigate(`/p/${projectName}/`); + }, + items: [ + { + name: dataSourcesLabel, + id: htmlIdGenerator("dataSources")(), + icon: , + onClick: () => { + navigate(`/p/${projectName}/data-source`); + }, + isSelected: useMatchSubpath("data-source"), + }, + { + name: entitiesLabel, + id: htmlIdGenerator("entities")(), + icon: , + onClick: () => { + navigate(`/p/${projectName}/entity`); + }, + isSelected: useMatchSubpath("entity"), + }, + { + name: featureViewsLabel, + id: htmlIdGenerator("featureView")(), + icon: , + onClick: () => { + navigate(`/p/${projectName}/feature-view`); + }, + isSelected: useMatchSubpath("feature-view"), + }, + { + name: featureServicesLabel, + id: htmlIdGenerator("featureService")(), + icon: , + onClick: () => { + navigate(`/p/${projectName}/feature-service`); + }, + isSelected: useMatchSubpath("feature-service"), + }, + { + name: savedDatasetsLabel, + id: htmlIdGenerator("savedDatasets")(), + icon: , + onClick: () => { + navigate(`/p/${projectName}/data-set`); + }, + isSelected: useMatchSubpath("data-set"), + }, + ], + }, + ]; + + return ( + toggleOpenOnMobile()} + isOpenOnMobile={isSideNavOpenOnMobile} + style={{ width: 192 }} + items={sideNav} + /> + ); +}; + +export default SideNav; diff --git a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx new file mode 100644 index 0000000000..6d5e6ea86f --- /dev/null +++ b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx @@ -0,0 +1,116 @@ +import React from "react"; +import { + EuiCodeBlock, + EuiDescriptionList, + EuiDescriptionListDescription, + EuiDescriptionListTitle, + EuiFlexGroup, + EuiFlexItem, + EuiHorizontalRule, + EuiSpacer, + EuiTitle, + } from "@elastic/eui"; + +interface BatchSourcePropertiesViewProps { + batchSource: { + type?: string | undefined; + dataSourceClassType?: string | undefined; + fileOptions?: { + fileUrl?: string | undefined; + } | undefined; + meta?: { + latestEventTimestamp?: Date | undefined; + earliestEventTimestamp?: Date | undefined; + } | undefined; + bigqueryOptions?: { + dbtModelSerialized?: string | undefined + } | undefined; + } +} + +const BatchSourcePropertiesView = (props: BatchSourcePropertiesViewProps) => { + const batchSource = props.batchSource; + return ( + + + + + {(batchSource.dataSourceClassType || batchSource.type) && + + Source Type + {batchSource.dataSourceClassType ? ( + + {batchSource.dataSourceClassType + .split(".") + .at(-1)} + ) + : batchSource.type ? ( + {batchSource.type} + ) + : ""} + } + + {batchSource.fileOptions && ( + + + File URL + + + {batchSource.fileOptions + ? batchSource.fileOptions.fileUrl + : ""} + + + )} + {batchSource.meta?.latestEventTimestamp && ( + + + Latest Event + + + {batchSource.meta.latestEventTimestamp.toLocaleDateString( + "en-CA" + )} + + + )} + {batchSource.meta?.earliestEventTimestamp && ( + + + Earliest Event + + + {batchSource.meta.earliestEventTimestamp.toLocaleDateString( + "en-CA" + )} + + + )} + + + + {batchSource.bigqueryOptions?.dbtModelSerialized && ( + + + + )} + {batchSource.bigqueryOptions?.dbtModelSerialized && ( + + +

+            <EuiTitle size="xs">
+              <h3>Dbt Transformation</h3>
+            </EuiTitle>
+            <EuiHorizontalRule margin="xs" />
+            <EuiCodeBlock language="sql" fontSize="m" paddingSize="m">
+              {batchSource.bigqueryOptions.dbtModelSerialized}
+            </EuiCodeBlock>
+          </EuiFlexItem>
+        )}
+      </EuiFlexGroup>
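+    // Editor's sketch (hypothetical props): the panel is purely
+    // presentational, so it can be exercised in isolation, e.g.
+    //   <BatchSourcePropertiesView
+    //     batchSource={{
+    //       type: "BATCH_FILE",
+    //       fileOptions: { fileUrl: "file:///data/driver_stats.parquet" },
+    //     }}
+    //   />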
); +}; + +export default BatchSourcePropertiesView; diff --git a/ui/src/pages/data-sources/DataSourceDbt.tsx b/ui/src/pages/data-sources/DataSourceDbt.tsx new file mode 100644 index 0000000000..4e61ba8026 --- /dev/null +++ b/ui/src/pages/data-sources/DataSourceDbt.tsx @@ -0,0 +1,35 @@ +import React from "react"; +import { + EuiCodeBlock, + EuiPanel, + EuiHorizontalRule, + EuiTitle, +} from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadDataSource from "./useLoadDataSource"; + +const DataSourceDbt = () => { + let { dataSourceName } = useParams(); + + const dsName = dataSourceName === undefined ? "" : dataSourceName; + + const { isSuccess, data } = useLoadDataSource(dsName); + + return isSuccess && data && data.bigqueryOptions ? ( + + +

+      <EuiTitle size="xs">
+        <h3>Dbt Transformation</h3>
+      </EuiTitle>
+      <EuiHorizontalRule margin="xs" />
+      <EuiCodeBlock language="sql" fontSize="m" paddingSize="m">
+        {data.bigqueryOptions.dbtModelSerialized}
+      </EuiCodeBlock>
+    </EuiPanel>
+ ) : ( + + No data so sad + + ); +}; + +export default DataSourceDbt; diff --git a/ui/src/pages/data-sources/DataSourceIndexEmptyState.tsx b/ui/src/pages/data-sources/DataSourceIndexEmptyState.tsx new file mode 100644 index 0000000000..9398c105df --- /dev/null +++ b/ui/src/pages/data-sources/DataSourceIndexEmptyState.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { EuiEmptyPrompt, EuiTitle, EuiLink, EuiButton } from "@elastic/eui"; +import FeastIconBlue from "../../graphics/FeastIconBlue"; + +const DataSourceIndexEmptyState = () => { + return ( + There are no data sources} + body={ +

+        <p>
+          This project does not have any Data Sources. Learn more about creating
+          Data Sources in Feast Docs.
+        </p>
+ } + actions={ + { + window.open( + "https://docs.feast.dev/getting-started/concepts/data-source", + "_blank" + ); + }} + > + Open Data Sources Docs + + } + footer={ + <> + +

+          <EuiTitle size="xxs">
+            <span>Want to learn more?</span>
+          </EuiTitle>
+ + Read Feast documentation + + + } + /> + ); +}; + +export default DataSourceIndexEmptyState; diff --git a/ui/src/pages/data-sources/DataSourceInstance.tsx b/ui/src/pages/data-sources/DataSourceInstance.tsx new file mode 100644 index 0000000000..332f51e023 --- /dev/null +++ b/ui/src/pages/data-sources/DataSourceInstance.tsx @@ -0,0 +1,84 @@ +import React from "react"; +import { Route, Routes, useNavigate, useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { DataSourceIcon32 } from "../../graphics/DataSourceIcon"; +import { useMatchExact, useMatchSubpath } from "../../hooks/useMatchSubpath"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import DataSourceRawData from "./DataSourceRawData"; +import DataSourceOverviewTab from "./DataSourceOverviewTab"; +import DataSourceDbt from "./DataSourceDbt"; +import useLoadDataSource from "./useLoadDataSource"; + +import { + useDataSourceCustomTabs, + useDataSourceCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +const DataSourceInstance = () => { + const navigate = useNavigate(); + let { dataSourceName } = useParams(); + + useDocumentTitle(`${dataSourceName} | Data Source | Feast`); + const dsName = dataSourceName === undefined ? "" : dataSourceName; + const { isSuccess, data } = useLoadDataSource(dsName); + + let tabs = [ + { + label: "Overview", + isSelected: useMatchExact(""), + onClick: () => { + navigate(""); + }, + }, + ]; + + const dbtTab = { + label: "Dbt Definition", + isSelected: useMatchSubpath("dbt"), + onClick: () => { + navigate("dbt"); + }, + }; + if (isSuccess && data?.bigqueryOptions?.dbtModelSerialized) { + tabs.push(dbtTab); + } + + const { customNavigationTabs } = useDataSourceCustomTabs(navigate); + tabs = tabs.concat(customNavigationTabs); + + const CustomTabRoutes = useDataSourceCustomTabRoutes(); + + return ( + + + + + + } /> + } /> + } /> + {CustomTabRoutes} + + + + + ); +}; + +export default DataSourceInstance; diff --git a/ui/src/pages/data-sources/DataSourceOverviewTab.tsx b/ui/src/pages/data-sources/DataSourceOverviewTab.tsx new file mode 100644 index 0000000000..f7c05000e7 --- /dev/null +++ b/ui/src/pages/data-sources/DataSourceOverviewTab.tsx @@ -0,0 +1,120 @@ +import { + EuiFlexGroup, + EuiHorizontalRule, + EuiLoadingSpinner, + EuiText, + EuiTitle, +} from "@elastic/eui"; +import { + EuiPanel, + EuiFlexItem, + EuiDescriptionList, + EuiDescriptionListTitle, + EuiDescriptionListDescription, + EuiSpacer, +} from "@elastic/eui"; +import React from "react"; +import { useParams } from "react-router-dom"; +import BatchSourcePropertiesView from "./BatchSourcePropertiesView"; +import FeatureViewEdgesList from "../entities/FeatureViewEdgesList"; +import RequestDataSourceSchemaTable from "./RequestDataSourceSchemaTable"; +import useLoadDataSource from "./useLoadDataSource"; + +const DataSourceOverviewTab = () => { + let { dataSourceName } = useParams(); + + const dsName = dataSourceName === undefined ? "" : dataSourceName; + const { isLoading, isSuccess, isError, data, consumingFeatureViews } = + useLoadDataSource(dsName); + const isEmpty = data === undefined; + console.log(consumingFeatureViews); + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

+        <p>No data source with name: {dataSourceName}</p>
+      }
+      {isError &&
+        <p>Error loading data source: {dataSourceName}</p>
} + {isSuccess && data && ( + + + + + + + +

+                <EuiTitle size="xs">
+                  <h3>Properties</h3>
+                </EuiTitle>
+                <EuiHorizontalRule margin="xs" />
+ + {data.fileOptions || data.bigqueryOptions ? ( + + ) : data.requestDataOptions ? ( + + + + Source Type + + + {data.type} + + + + ): ( + "" + )} +
+
+
+ + + + {data.requestDataOptions ? ( + + +

+                <EuiTitle size="xs">
+                  <h3>Request Source Schema</h3>
+                </EuiTitle>
+                <EuiHorizontalRule margin="xs" />
+                <RequestDataSourceSchemaTable
+                  fields={Object.entries(data.requestDataOptions.schema).map(
+                    ([field, type]) => {
+                      return {
+                        fieldName: field,
+                        valueType: type,
+                      };
+                    }
+                  )}
+                />
+ ) : ( + "" + )} +
+
+
+ + + +

+              <EuiTitle size="xs">
+                <h3>Consuming Feature Views</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {consumingFeatureViews && consumingFeatureViews.length > 0 ? (
+                <FeatureViewEdgesList
+                  fvNames={consumingFeatureViews.map((f) => {
+                    return f.target.name;
+                  })}
+                />
+              ) : (
+                <EuiText>No consuming feature views</EuiText>
+              )}
+
+
+
+ )} +
+ ); +}; +export default DataSourceOverviewTab; diff --git a/ui/src/pages/data-sources/DataSourceRawData.tsx b/ui/src/pages/data-sources/DataSourceRawData.tsx new file mode 100644 index 0000000000..ad4c7484a3 --- /dev/null +++ b/ui/src/pages/data-sources/DataSourceRawData.tsx @@ -0,0 +1,24 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadDataSource from "./useLoadDataSource"; + +const DataSourceRawData = () => { + let { dataSourceName } = useParams(); + + const dsName = dataSourceName === undefined ? "" : dataSourceName; + + const { isSuccess, data } = useLoadDataSource(dsName); + + return isSuccess && data ? ( + +
+      <pre>{JSON.stringify(data, null, 2)}</pre>
+    </EuiPanel>
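+    // Editor's note: this tab just dumps the parsed registry entry via
+    // JSON.stringify(data, null, 2), so the fields shown mirror whatever the
+    // registry parser produced rather than a fixed schema.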
+ ) : ( + + No data so sad + + ); +}; + +export default DataSourceRawData; diff --git a/ui/src/pages/data-sources/DataSourcesListingTable.tsx b/ui/src/pages/data-sources/DataSourcesListingTable.tsx new file mode 100644 index 0000000000..661d18c7e9 --- /dev/null +++ b/ui/src/pages/data-sources/DataSourcesListingTable.tsx @@ -0,0 +1,54 @@ +import React from "react"; +import { EuiBasicTable } from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { FeastDatasourceType } from "../../parsers/feastDatasources"; +import { useParams } from "react-router-dom"; + +interface DatasourcesListingTableProps { + dataSources: FeastDatasourceType[]; +} + +const DatasourcesListingTable = ({ + dataSources, +}: DatasourcesListingTableProps) => { + const { projectName } = useParams(); + + const columns = [ + { + name: "Name", + field: "name", + sortable: true, + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "Type", + field: "type", + sortable: true, + }, + ]; + + const getRowProps = (item: FeastDatasourceType) => { + return { + "data-test-subj": `row-${item.name}`, + }; + }; + + return ( + + ); +}; + +export default DatasourcesListingTable; diff --git a/ui/src/pages/data-sources/Index.tsx b/ui/src/pages/data-sources/Index.tsx new file mode 100644 index 0000000000..81e3a96702 --- /dev/null +++ b/ui/src/pages/data-sources/Index.tsx @@ -0,0 +1,66 @@ +import React, { useContext } from "react"; + +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, + EuiLoadingSpinner, +} from "@elastic/eui"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import DatasourcesListingTable from "./DataSourcesListingTable"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import DataSourceIndexEmptyState from "./DataSourceIndexEmptyState"; +import { DataSourceIcon32 } from "../../graphics/DataSourceIcon"; + +const useLoadDatasources = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.dataSources; + + return { + ...registryQuery, + data, + }; +}; + +const Index = () => { + const { isLoading, isSuccess, isError, data } = useLoadDatasources(); + + useDocumentTitle(`Data Sources | Feast`); + + return ( + + + + + {isLoading && ( +

+          <p>
+            <EuiLoadingSpinner size="m" /> Loading
+          </p>
+        )}
+        {isError && <p>We encountered an error while loading.</p>}
+        {isSuccess && !data && <DataSourceIndexEmptyState />}
+        {isSuccess && data && <DatasourcesListingTable dataSources={data} />}
+      </EuiPageContentBody>
+    </EuiPageContent>
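+      // Editor's note: useLoadDatasources (above) only projects
+      // registryQuery.data.objects.dataSources out of the shared
+      // useLoadRegistry query, so isLoading/isError here track the single
+      // registry fetch rather than a separate request.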
+ ); +}; + +export default Index; diff --git a/ui/src/pages/data-sources/RequestDataSourceSchemaTable.tsx b/ui/src/pages/data-sources/RequestDataSourceSchemaTable.tsx new file mode 100644 index 0000000000..60ef4c406a --- /dev/null +++ b/ui/src/pages/data-sources/RequestDataSourceSchemaTable.tsx @@ -0,0 +1,38 @@ +import React from "react"; +import { EuiBasicTable } from "@elastic/eui"; +import { FEAST_FEATURE_VALUE_TYPES } from "../../parsers/types"; + +interface RequestDataSourceSchemaField { + fieldName: string; + valueType: FEAST_FEATURE_VALUE_TYPES; +} + +interface RequestDataSourceSchema { + fields: RequestDataSourceSchemaField[]; +} + +const RequestDataSourceSchemaTable = ({ fields }: RequestDataSourceSchema) => { + console.log(fields); + const columns = [ + { + name: "Field", + field: "fieldName", + }, + { + name: "Value Type", + field: "valueType", + }, + ]; + + const getRowProps = (item: RequestDataSourceSchemaField) => { + return { + "data-test-subj": `row-${item.fieldName}`, + }; + }; + + return ( + + ); +}; + +export default RequestDataSourceSchemaTable; diff --git a/ui/src/pages/data-sources/useLoadDataSource.ts b/ui/src/pages/data-sources/useLoadDataSource.ts new file mode 100644 index 0000000000..0a589df7ef --- /dev/null +++ b/ui/src/pages/data-sources/useLoadDataSource.ts @@ -0,0 +1,35 @@ +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import { FEAST_FCO_TYPES } from "../../parsers/types"; +import useLoadRegistry from "../../queries/useLoadRegistry"; + +const useLoadDataSource = (dataSourceName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.dataSources?.find( + (ds) => ds.name === dataSourceName + ); + + const consumingFeatureViews = + registryQuery.data === undefined + ? undefined + : registryQuery.data.relationships.filter( + (relationship) => { + return (relationship.source.type === FEAST_FCO_TYPES.dataSource && + relationship.source.name === data?.name && + relationship.target.type === FEAST_FCO_TYPES.featureView); + } + ); + + return { + ...registryQuery, + data, + consumingFeatureViews + }; +}; + +export default useLoadDataSource; diff --git a/ui/src/pages/entities/EntitiesListingTable.tsx b/ui/src/pages/entities/EntitiesListingTable.tsx new file mode 100644 index 0000000000..2e608e3769 --- /dev/null +++ b/ui/src/pages/entities/EntitiesListingTable.tsx @@ -0,0 +1,63 @@ +import React from "react"; +import { EuiBasicTable } from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { FeastEntityType } from "../../parsers/feastEntities"; +import useFeatureViewEdgesByEntity from "./useFeatureViewEdgesByEntity"; +import { useParams } from "react-router-dom"; + +interface EntitiesListingTableProps { + entities: FeastEntityType[]; +} + +const EntitiesListingTable = ({ entities }: EntitiesListingTableProps) => { + const { isSuccess, data } = useFeatureViewEdgesByEntity(); + const { projectName } = useParams(); + + const columns = [ + { + name: "Name", + field: "spec.name", + sortable: true, + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "Type", + field: "spec.valueType", + sortable: true, + render: (valueType: string) => { + return valueType; + }, + }, + { + name: "# of FVs", + render: (item: FeastEntityType) => { + if (isSuccess && data) { + return data[item.spec.name] ? 
data[item.spec.name].length : "0"; + } else { + return "."; + } + }, + }, + ]; + + const getRowProps = (item: FeastEntityType) => { + return { + "data-test-subj": `row-${item.spec.name}`, + }; + }; + + return ( + + ); +}; + +export default EntitiesListingTable; diff --git a/ui/src/pages/entities/EntityIndexEmptyState.tsx b/ui/src/pages/entities/EntityIndexEmptyState.tsx new file mode 100644 index 0000000000..7720be2d53 --- /dev/null +++ b/ui/src/pages/entities/EntityIndexEmptyState.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { EuiEmptyPrompt, EuiTitle, EuiLink, EuiButton } from "@elastic/eui"; +import FeastIconBlue from "../../graphics/FeastIconBlue"; + +const EntityIndexEmptyState = () => { + return ( + There are no entities} + body={ +

+        <p>
+          This project does not have any Entities. Learn more about creating
+          Entities in Feast Docs.
+        </p>
+ } + actions={ + { + window.open( + "https://docs.feast.dev/getting-started/concepts/entity", + "_blank" + ); + }} + > + Open Entities Docs + + } + footer={ + <> + +

+          <EuiTitle size="xxs">
+            <span>Want to learn more?</span>
+          </EuiTitle>
+ + Read Feast documentation + + + } + /> + ); +}; + +export default EntityIndexEmptyState; diff --git a/ui/src/pages/entities/EntityInstance.tsx b/ui/src/pages/entities/EntityInstance.tsx new file mode 100644 index 0000000000..b44967b178 --- /dev/null +++ b/ui/src/pages/entities/EntityInstance.tsx @@ -0,0 +1,62 @@ +import React from "react"; +import { Route, Routes, useNavigate, useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { EntityIcon32 } from "../../graphics/EntityIcon"; +import { useMatchExact } from "../../hooks/useMatchSubpath"; +import EntityOverviewTab from "./EntityOverviewTab"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import { + useEntityCustomTabs, + useEntityCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +const EntityInstance = () => { + const navigate = useNavigate(); + let { entityName } = useParams(); + + const { customNavigationTabs } = useEntityCustomTabs(navigate); + const CustomTabRoutes = useEntityCustomTabRoutes(); + + useDocumentTitle(`${entityName} | Entity | Feast`); + + return ( + + { + navigate(""); + }, + }, + ...customNavigationTabs, + ]} + /> + + + + } /> + {CustomTabRoutes} + + + + + ); +}; + +export default EntityInstance; diff --git a/ui/src/pages/entities/EntityOverviewTab.tsx b/ui/src/pages/entities/EntityOverviewTab.tsx new file mode 100644 index 0000000000..dce0c12824 --- /dev/null +++ b/ui/src/pages/entities/EntityOverviewTab.tsx @@ -0,0 +1,134 @@ +import { + EuiFlexGroup, + EuiHorizontalRule, + EuiLoadingSpinner, + EuiTitle, +} from "@elastic/eui"; +import { + EuiPanel, + EuiText, + EuiFlexItem, + EuiSpacer, + EuiStat, + EuiDescriptionList, + EuiDescriptionListTitle, + EuiDescriptionListDescription, +} from "@elastic/eui"; +import React from "react"; +import { useParams } from "react-router-dom"; +import TagsDisplay from "../../components/TagsDisplay"; +import FeatureViewEdgesList from "./FeatureViewEdgesList"; +import useFeatureViewEdgesByEntity from "./useFeatureViewEdgesByEntity"; +import useLoadEntity from "./useLoadEntity"; + +const EntityOverviewTab = () => { + let { entityName } = useParams(); + + const eName = entityName === undefined ? "" : entityName; + const { isLoading, isSuccess, isError, data } = useLoadEntity(eName); + const isEmpty = data === undefined; + + const fvEdges = useFeatureViewEdgesByEntity(); + const fvEdgesSuccess = fvEdges.isSuccess; + const fvEdgesData = fvEdges.data; + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

+        <p>No entity with name: {entityName}</p>
+      }
+      {isError &&
+        <p>Error loading entity: {entityName}</p>
} + {isSuccess && data && ( + + + + + +

+              <EuiTitle size="xs">
+                <h3>Properties</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              <EuiDescriptionList>
+                <EuiDescriptionListTitle>Join Key</EuiDescriptionListTitle>
+                <EuiDescriptionListDescription>
+                  {data.spec.joinKey}
+                </EuiDescriptionListDescription>
+                <EuiDescriptionListTitle>Description</EuiDescriptionListTitle>
+                <EuiDescriptionListDescription>
+                  {data.spec.description}
+                </EuiDescriptionListDescription>
+                <EuiDescriptionListTitle>Value Type</EuiDescriptionListTitle>
+                <EuiDescriptionListDescription>
+                  {data.spec.valueType}
+                </EuiDescriptionListDescription>
+              </EuiDescriptionList>
+              <EuiDescriptionList>
+                <EuiDescriptionListTitle>Created</EuiDescriptionListTitle>
+                <EuiDescriptionListDescription>
+                  {data.meta.createdTimestamp ? (
+                    data.meta.createdTimestamp.toLocaleDateString("en-CA")
+                  ) : (
+                    <EuiText>
+                      No createdTimestamp specified on this entity.
+                    </EuiText>
+                  )}
+                </EuiDescriptionListDescription>
+                <EuiDescriptionListTitle>Updated</EuiDescriptionListTitle>
+                <EuiDescriptionListDescription>
+                  {data.meta.lastUpdatedTimestamp ? (
+                    data.meta.lastUpdatedTimestamp.toLocaleDateString("en-CA")
+                  ) : (
+                    <EuiText>
+                      No lastUpdatedTimestamp specified on this entity.
+                    </EuiText>
+                  )}
+                </EuiDescriptionListDescription>
+              </EuiDescriptionList>
+ + + +

+              <EuiTitle size="xs">
+                <h3>Feature Views</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {fvEdgesSuccess && fvEdgesData ? (
+                fvEdgesData[eName] ? (
+                  <FeatureViewEdgesList
+                    fvNames={fvEdgesData[eName].map((r) => {
+                      return r.target.name;
+                    })}
+                  />
+                ) : (
+                  <EuiText>No feature views have this entity</EuiText>
+                )
+              ) : (
+                <EuiText>
+                  Error loading feature views that have this entity.
+                </EuiText>
+              )}
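+              {/* Editor's note: fvEdgesData comes from
+                  useFeatureViewEdgesByEntity, which groups relationship edges
+                  by entity name, so fvEdgesData[eName] lists the edges whose
+                  targets are feature views built on this entity. */}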
+ + + +

+              <EuiTitle size="xs">
+                <h3>Labels</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.labels ? (
+                <TagsDisplay tags={data.spec.labels} />
+              ) : (
+                <EuiText>No labels specified on this entity.</EuiText>
+              )}
+
+
+
+ )} +
+ ); +}; +export default EntityOverviewTab; diff --git a/ui/src/pages/entities/EntityRawData.tsx b/ui/src/pages/entities/EntityRawData.tsx new file mode 100644 index 0000000000..a6433566d7 --- /dev/null +++ b/ui/src/pages/entities/EntityRawData.tsx @@ -0,0 +1,24 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadEntity from "./useLoadEntity"; + +const EntityRawData = () => { + let { entityName } = useParams(); + + const eName = entityName === undefined ? "" : entityName; + + const { isSuccess, data } = useLoadEntity(eName); + + return isSuccess && data ? ( + +
+      <pre>{JSON.stringify(data, null, 2)}</pre>
+    </EuiPanel>
+ ) : ( + + No data so sad + + ); +}; + +export default EntityRawData; diff --git a/ui/src/pages/entities/FeatureViewEdgesList.tsx b/ui/src/pages/entities/FeatureViewEdgesList.tsx new file mode 100644 index 0000000000..95bc51c56d --- /dev/null +++ b/ui/src/pages/entities/FeatureViewEdgesList.tsx @@ -0,0 +1,89 @@ +import React from "react"; +import { EuiBasicTable, EuiLoadingSpinner } from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { useParams } from "react-router-dom"; +import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; +import { EntityRelation } from "../../parsers/parseEntityRelationships"; +import { FEAST_FCO_TYPES } from "../../parsers/types"; + +interface FeatureViewEdgesListInterace { + fvNames: string[]; +} + +const whereFSconsumesThisFv = (fvName: string) => { + return (r: EntityRelation) => { + return ( + r.source.name === fvName && + r.target.type === FEAST_FCO_TYPES.featureService + ); + }; +}; + +const useGetFSConsumersOfFV = (fvList: string[]) => { + const relationshipQuery = useLoadRelationshipData(); + + const data = relationshipQuery.data + ? fvList.reduce((memo: Record, fvName) => { + if (relationshipQuery.data) { + memo[fvName] = relationshipQuery.data + .filter(whereFSconsumesThisFv(fvName)) + .map((fs) => { + return fs.target.name; + }); + } + + return memo; + }, {}) + : undefined; + + return { + ...relationshipQuery, + data, + }; +}; + +const FeatureViewEdgesList = ({ fvNames }: FeatureViewEdgesListInterace) => { + const { projectName } = useParams(); + + const { isLoading, data } = useGetFSConsumersOfFV(fvNames); + + const columns = [ + { + name: "Name", + field: "", + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "FS Consumers", + render: (name: string) => { + return ( + + {isLoading && } + {data && data[name].length} + + ); + }, + }, + ]; + + const getRowProps = (item: string) => { + return { + "data-test-subj": `row-${item}`, + }; + }; + + return ( + + ); +}; + +export default FeatureViewEdgesList; diff --git a/ui/src/pages/entities/Index.tsx b/ui/src/pages/entities/Index.tsx new file mode 100644 index 0000000000..43360f6866 --- /dev/null +++ b/ui/src/pages/entities/Index.tsx @@ -0,0 +1,67 @@ +import React, { useContext } from "react"; + +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, + EuiLoadingSpinner, +} from "@elastic/eui"; + +import { EntityIcon32 } from "../../graphics/EntityIcon"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import EntitiesListingTable from "./EntitiesListingTable"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import EntityIndexEmptyState from "./EntityIndexEmptyState"; + +const useLoadEntities = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.entities; + + return { + ...registryQuery, + data, + }; +}; + +const Index = () => { + const { isLoading, isSuccess, isError, data } = useLoadEntities(); + + useDocumentTitle(`Entities | Feast`); + + return ( + + + + + {isLoading && ( +

+          <p>
+            <EuiLoadingSpinner size="m" /> Loading
+          </p>
+        )}
+        {isError && <p>We encountered an error while loading.</p>}
+        {isSuccess && !data && <EntityIndexEmptyState />}
+        {isSuccess && data && <EntitiesListingTable entities={data} />}
+      </EuiPageContentBody>
+    </EuiPageContent>
+ ); +}; + +export default Index; diff --git a/ui/src/pages/entities/useFeatureViewEdgesByEntity.ts b/ui/src/pages/entities/useFeatureViewEdgesByEntity.ts new file mode 100644 index 0000000000..01ea88cdb3 --- /dev/null +++ b/ui/src/pages/entities/useFeatureViewEdgesByEntity.ts @@ -0,0 +1,30 @@ +import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; +import { EntityRelation } from "../../parsers/parseEntityRelationships"; + +const entityGroupByName = (data: EntityRelation[]) => { + return data + .filter((edge) => { + return edge.source.type === "entity"; + }) + .reduce((memo: Record, current) => { + if (memo[current.source.name]) { + memo[current.source.name].push(current); + } else { + memo[current.source.name] = [current]; + } + + return memo; + }, {}); +}; + +const useFeatureViewEdgesByEntity = () => { + const query = useLoadRelationshipData(); + + return { + ...query, + data: + query.isSuccess && query.data ? entityGroupByName(query.data) : undefined, + }; +}; + +export default useFeatureViewEdgesByEntity; diff --git a/ui/src/pages/entities/useLoadEntity.ts b/ui/src/pages/entities/useLoadEntity.ts new file mode 100644 index 0000000000..a1ca6d55c1 --- /dev/null +++ b/ui/src/pages/entities/useLoadEntity.ts @@ -0,0 +1,22 @@ +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import useLoadRegistry from "../../queries/useLoadRegistry"; + +const useLoadEntity = (entityName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.entities?.find( + (fv) => fv.spec.name === entityName + ); + + return { + ...registryQuery, + data, + }; +}; + +export default useLoadEntity; diff --git a/ui/src/pages/feature-services/FeatureServiceIndexEmptyState.tsx b/ui/src/pages/feature-services/FeatureServiceIndexEmptyState.tsx new file mode 100644 index 0000000000..40778680a3 --- /dev/null +++ b/ui/src/pages/feature-services/FeatureServiceIndexEmptyState.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { EuiEmptyPrompt, EuiTitle, EuiLink, EuiButton } from "@elastic/eui"; +import FeastIconBlue from "../../graphics/FeastIconBlue"; + +const FeatureServiceIndexEmptyState = () => { + return ( + There are no feature services} + body={ +

+        <p>
+          This project does not have any Feature Services. Learn more about
+          creating Feature Services in Feast Docs.
+        </p>
+ } + actions={ + { + window.open( + "https://docs.feast.dev/getting-started/concepts/feature-service", + "_blank" + ); + }} + > + Open Feature Services Docs + + } + footer={ + <> + +

+          <EuiTitle size="xxs">
+            <span>Want to learn more?</span>
+          </EuiTitle>
+ + Read Feast documentation + + + } + /> + ); +}; + +export default FeatureServiceIndexEmptyState; diff --git a/ui/src/pages/feature-services/FeatureServiceInstance.tsx b/ui/src/pages/feature-services/FeatureServiceInstance.tsx new file mode 100644 index 0000000000..cf0b09a057 --- /dev/null +++ b/ui/src/pages/feature-services/FeatureServiceInstance.tsx @@ -0,0 +1,63 @@ +import React from "react"; +import { Route, Routes, useNavigate, useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { FeatureServiceIcon32 } from "../../graphics/FeatureServiceIcon"; +import { useMatchExact } from "../../hooks/useMatchSubpath"; +import FeatureServiceOverviewTab from "./FeatureServiceOverviewTab"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; + +import { + useFeatureServiceCustomTabs, + useFeatureServiceCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +const FeatureServiceInstance = () => { + const navigate = useNavigate(); + let { featureServiceName } = useParams(); + + useDocumentTitle(`${featureServiceName} | Feature Service | Feast`); + + const { customNavigationTabs } = useFeatureServiceCustomTabs(navigate); + const CustomTabRoutes = useFeatureServiceCustomTabRoutes(); + + return ( + + { + navigate(""); + }, + }, + ...customNavigationTabs, + ]} + /> + + + + } /> + {CustomTabRoutes} + + + + + ); +}; + +export default FeatureServiceInstance; diff --git a/ui/src/pages/feature-services/FeatureServiceListingTable.tsx b/ui/src/pages/feature-services/FeatureServiceListingTable.tsx new file mode 100644 index 0000000000..c6205b020a --- /dev/null +++ b/ui/src/pages/feature-services/FeatureServiceListingTable.tsx @@ -0,0 +1,96 @@ +import React from "react"; +import { + EuiBasicTable, + EuiTableComputedColumnType, + EuiTableFieldDataColumnType, +} from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { + FeastFeatureInServiceType, + FeastFeatureServiceType, +} from "../../parsers/feastFeatureServices"; +import { useParams } from "react-router-dom"; + +interface FeatureServiceListingTableProps { + tagKeysSet: Set; + featureServices: FeastFeatureServiceType[]; +} + +type FeatureServiceTypeColumn = + | EuiTableFieldDataColumnType + | EuiTableComputedColumnType; + +const FeatureServiceListingTable = ({ + tagKeysSet, + featureServices, +}: FeatureServiceListingTableProps) => { + const { projectName } = useParams(); + + const columns: FeatureServiceTypeColumn[] = [ + { + name: "Name", + field: "spec.name", + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "# of Features", + field: "spec.features", + render: (featureViews: FeastFeatureInServiceType[]) => { + var numFeatures = 0; + featureViews.forEach((featureView) => { + numFeatures += featureView.featureColumns.length; + }); + return numFeatures; + }, + }, + { + name: "Created at", + field: "meta.createdTimestamp", + render: (date: Date) => { + return date.toLocaleDateString("en-CA"); + }, + }, + ]; + + tagKeysSet.forEach((key) => { + columns.push({ + name: key, + render: (item: FeastFeatureServiceType) => { + let tag = n/a; + + const value = item.spec.tags ? 
item.spec.tags[key] : undefined; + + if (value) { + tag = {value}; + } + + return tag; + }, + }); + }); + + const getRowProps = (item: FeastFeatureServiceType) => { + return { + "data-test-subj": `row-${item.spec.name}`, + }; + }; + + return ( + + ); +}; + +export default FeatureServiceListingTable; diff --git a/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx b/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx new file mode 100644 index 0000000000..a3fc897325 --- /dev/null +++ b/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx @@ -0,0 +1,169 @@ +import { + EuiBadge, + EuiFlexGroup, + EuiFlexItem, + EuiHorizontalRule, + EuiLoadingSpinner, + EuiPanel, + EuiSpacer, + EuiStat, + EuiText, + EuiTextAlign, + EuiTitle, +} from "@elastic/eui"; +import React from "react"; +import { useParams } from "react-router-dom"; +import { useNavigate } from "react-router-dom"; +import FeaturesInServiceList from "../../components/FeaturesInServiceDisplay"; +import TagsDisplay from "../../components/TagsDisplay"; +import { encodeSearchQueryString } from "../../hooks/encodeSearchQueryString"; +import FeatureViewEdgesList from "../entities/FeatureViewEdgesList"; +import useLoadFeatureService from "./useLoadFeatureService"; + +const FeatureServiceOverviewTab = () => { + let { featureServiceName, projectName } = useParams(); + + const fsName = featureServiceName === undefined ? "" : featureServiceName; + + const { isLoading, isSuccess, isError, data, entities } = + useLoadFeatureService(fsName); + const isEmpty = data === undefined; + + let numFeatures = 0; + let numFeatureViews = 0; + if (data) { + data.spec.features.forEach((featureView) => { + numFeatureViews += 1; + numFeatures += featureView.featureColumns.length; + }); + } + + const navigate = useNavigate(); + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

+        <p>No feature service with name: {featureServiceName}</p>
+      }
+      {isError &&
+        <p>Error loading feature service: {featureServiceName}</p>
} + {isSuccess && data && ( + + + + + + + +

+              <EuiText>
+                <EuiTextAlign textAlign="center">from</EuiTextAlign>
+              </EuiText>
+
+ + + + + + +
+ + + + +

+              <EuiTitle size="xs">
+                <h3>Features</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.features ? (
+                <FeaturesInServiceList featureViews={data.spec.features} />
+              ) : (
+                <EuiText>
+                  No features specified for this feature service.
+                </EuiText>
+              )}
+
+ + + +

+              <EuiTitle size="xs">
+                <h3>Tags</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.tags ? (
+                <TagsDisplay
+                  tags={data.spec.tags}
+                  createLink={(key, value) => {
+                    return (
+                      `/p/${projectName}/feature-service?` +
+                      encodeSearchQueryString(`${key}:${value}`)
+                    );
+                  }}
+                />
+              ) : (
+                <EuiText>No Tags specified on this feature service.</EuiText>
+              )}
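+              {/* Editor's sketch: createLink turns a (key, value) tag pair
+                  into a pre-filtered search URL, e.g. ("team", "risk") yields
+                  `/p/${projectName}/feature-service?` +
+                  encodeSearchQueryString("team:risk"). */}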
+ + + +

+              <EuiTitle size="xs">
+                <h3>Entities</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {entities ? (
+                <EuiFlexGroup wrap>
+                  {entities.map((entity) => {
+                    return (
+                      <EuiFlexItem grow={false} key={entity.name}>
+                        <EuiBadge
+                          color="primary"
+                          onClick={() => {
+                            navigate(
+                              `/p/${projectName}/entity/${entity.name}`
+                            );
+                          }}
+                          onClickAriaLabel={entity.name}
+                          data-test-sub="testExample1"
+                        >
+                          {entity.name}
+                        </EuiBadge>
+                      </EuiFlexItem>
+                    );
+                  })}
+                </EuiFlexGroup>
+              ) : (
+                <EuiText>No Entities.</EuiText>
+              )}
+ + + +

+              <EuiTitle size="xs">
+                <h3>All Feature Views</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.features.length > 0 ? (
+                <FeatureViewEdgesList
+                  fvNames={data.spec.features.map((f) => {
+                    return f.featureViewName;
+                  })}
+                />
+              ) : (
+                <EuiText>No feature views in this feature service</EuiText>
+              )}
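+              {/* Editor's note: data.spec.features holds one entry per
+                  feature view projection, so mapping f.featureViewName gives
+                  the feature views backing this service. */}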
+
+
+
+ )} +
+ ); +}; + +export default FeatureServiceOverviewTab; diff --git a/ui/src/pages/feature-services/FeatureServiceRawData.tsx b/ui/src/pages/feature-services/FeatureServiceRawData.tsx new file mode 100644 index 0000000000..7fd8d2534a --- /dev/null +++ b/ui/src/pages/feature-services/FeatureServiceRawData.tsx @@ -0,0 +1,24 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadFeatureService from "./useLoadFeatureService"; + +const FeatureServiceRawData = () => { + let { featureServiceName } = useParams(); + + const fsName = featureServiceName === undefined ? "" : featureServiceName; + + const { isSuccess, data } = useLoadFeatureService(fsName); + + return isSuccess && data ? ( + +
+      <pre>{JSON.stringify(data, null, 2)}</pre>
+    </EuiPanel>
+ ) : ( + + No data so sad + + ); +}; + +export default FeatureServiceRawData; diff --git a/ui/src/pages/feature-services/Index.tsx b/ui/src/pages/feature-services/Index.tsx new file mode 100644 index 0000000000..441f3cf82c --- /dev/null +++ b/ui/src/pages/feature-services/Index.tsx @@ -0,0 +1,176 @@ +import React, { useContext } from "react"; + +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, + EuiLoadingSpinner, + EuiTitle, + EuiSpacer, + EuiFlexGroup, + EuiFlexItem, + EuiFieldSearch, +} from "@elastic/eui"; + +import { FeatureServiceIcon32 } from "../../graphics/FeatureServiceIcon"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import FeatureServiceListingTable from "./FeatureServiceListingTable"; +import { + useSearchQuery, + useTagsWithSuggestions, + filterInputInterface, + tagTokenGroupsType, +} from "../../hooks/useSearchInputWithTags"; +import { FeastFeatureServiceType } from "../../parsers/feastFeatureServices"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import FeatureServiceIndexEmptyState from "./FeatureServiceIndexEmptyState"; +import TagSearch from "../../components/TagSearch"; +import { useFeatureServiceTagsAggregation } from "../../hooks/useTagsAggregation"; + +const useLoadFeatureServices = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.featureServices; + + return { + ...registryQuery, + data, + }; +}; + +const shouldIncludeFSsGivenTokenGroups = ( + entry: FeastFeatureServiceType, + tagTokenGroups: tagTokenGroupsType +) => { + return Object.entries(tagTokenGroups).every(([key, values]) => { + const entryTagValue = entry.spec.tags ? entry.spec.tags[key] : undefined; + + if (entryTagValue) { + return values.every((value) => { + return value.length > 0 ? entryTagValue.indexOf(value) >= 0 : true; // Don't filter if the string is empty + }); + } else { + return false; + } + }); +}; + +const filterFn = ( + data: FeastFeatureServiceType[], + filterInput: filterInputInterface +) => { + let filteredByTags = data; + + if (Object.keys(filterInput.tagTokenGroups).length) { + filteredByTags = data.filter((entry) => { + return shouldIncludeFSsGivenTokenGroups( + entry, + filterInput.tagTokenGroups + ); + }); + } + + if (filterInput.searchTokens.length) { + return filteredByTags.filter((entry) => { + return filterInput.searchTokens.find((token) => { + return token.length >= 3 && entry.spec.name.indexOf(token) >= 0; + }); + }); + } + + return filteredByTags; +}; + +const Index = () => { + const { isLoading, isSuccess, isError, data } = useLoadFeatureServices(); + const tagAggregationQuery = useFeatureServiceTagsAggregation(); + + useDocumentTitle(`Feature Services | Feast`); + + const { searchString, searchTokens, setSearchString } = useSearchQuery(); + + const { + currentTag, + tagsString, + tagTokenGroups, + tagKeysSet, + tagSuggestions, + suggestionMode, + setTagsString, + acceptSuggestion, + setCursorPosition, + } = useTagsWithSuggestions(tagAggregationQuery.data); + + const filterResult = data + ? filterFn(data, { tagTokenGroups, searchTokens }) + : data; + + return ( + + + + + {isLoading && ( +

+          <p>
+            <EuiLoadingSpinner size="m" /> Loading
+          </p>
+        )}
+        {isError && <p>We encountered an error while loading.</p>
} + {isSuccess && !data && } + {isSuccess && filterResult && ( + + + + +

+                <EuiFlexItem grow={1}>
+                  <EuiTitle size="xs">
+                    <h4>Search</h4>
+                  </EuiTitle>
+                  <EuiFieldSearch
+                    value={searchString}
+                    fullWidth={true}
+                    onChange={(e) => {
+                      setSearchString(e.target.value);
+                    }}
+                  />
+                </EuiFlexItem>
+                <EuiFlexItem grow={2}>
+                  <TagSearch
+                    currentTag={currentTag}
+                    tagsString={tagsString}
+                    setTagsString={setTagsString}
+                    acceptSuggestion={acceptSuggestion}
+                    tagSuggestions={tagSuggestions}
+                    suggestionMode={suggestionMode}
+                    setCursorPosition={setCursorPosition}
+                  />
+                </EuiFlexItem>
+              </EuiFlexGroup>
+              <EuiSpacer size="m" />
+              <FeatureServiceListingTable
+                featureServices={filterResult}
+                tagKeysSet={tagKeysSet}
+              />
+ )} +
+      </EuiPageContentBody>
+    </EuiPageContent>
+ ); +}; + +export default Index; diff --git a/ui/src/pages/feature-services/useLoadFeatureService.ts b/ui/src/pages/feature-services/useLoadFeatureService.ts new file mode 100644 index 0000000000..be2242eae0 --- /dev/null +++ b/ui/src/pages/feature-services/useLoadFeatureService.ts @@ -0,0 +1,48 @@ +import { FEAST_FCO_TYPES } from "../../parsers/types"; +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import { EntityReference } from "../../parsers/parseEntityRelationships"; + +const useLoadFeatureService = (featureServiceName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.featureServices?.find( + (fs) => fs.spec.name === featureServiceName + ); + + let entities = + data === undefined + ? undefined + : registryQuery.data?.indirectRelationships + .filter((relationship) => { + return ( + relationship.target.type === FEAST_FCO_TYPES.featureService && + relationship.target.name === data.spec.name && + relationship.source.type === FEAST_FCO_TYPES.entity + ); + }) + .map((relationship) => { + return relationship.source; + }); + // Deduplicate on name of entity + if (entities) { + let entityToName: { [key: string]: EntityReference } = {}; + for (let entity of entities) { + entityToName[entity.name] = entity; + } + entities = Object.values(entityToName); + } + return { + ...registryQuery, + data, + entities, + }; +}; + +export default useLoadFeatureService; diff --git a/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx b/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx new file mode 100644 index 0000000000..fc98fe8e5e --- /dev/null +++ b/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx @@ -0,0 +1,43 @@ +import React from "react"; +import { EuiBasicTable } from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { useParams } from "react-router-dom"; + +interface ConsumingFeatureServicesListInterace { + fsNames: string[]; +} + +const ConsumingFeatureServicesList = ({ + fsNames, +}: ConsumingFeatureServicesListInterace) => { + const { projectName } = useParams(); + + const columns = [ + { + name: "Name", + field: "", + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + ]; + + const getRowProps = (item: string) => { + return { + "data-test-subj": `row-${item}`, + }; + }; + + return ( + + ); +}; + +export default ConsumingFeatureServicesList; diff --git a/ui/src/pages/feature-views/FeatureViewIndexEmptyState.tsx b/ui/src/pages/feature-views/FeatureViewIndexEmptyState.tsx new file mode 100644 index 0000000000..a96e23106b --- /dev/null +++ b/ui/src/pages/feature-views/FeatureViewIndexEmptyState.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { EuiEmptyPrompt, EuiTitle, EuiLink, EuiButton } from "@elastic/eui"; +import FeastIconBlue from "../../graphics/FeastIconBlue"; + +const FeatureViewIndexEmptyState = () => { + return ( + There are no feature views} + body={ +

+        <p>
+          This project does not have any Feature Views. Learn more about
+          creating Feature Views in Feast Docs.
+        </p>
+ } + actions={ + { + window.open( + "https://docs.feast.dev/getting-started/concepts/feature-view", + "_blank" + ); + }} + > + Open Feature View Docs + + } + footer={ + <> + +

+          <EuiTitle size="xxs">
+            <span>Want to learn more?</span>
+          </EuiTitle>
+ + Read Feast documentation + + + } + /> + ); +}; + +export default FeatureViewIndexEmptyState; diff --git a/ui/src/pages/feature-views/FeatureViewInstance.tsx b/ui/src/pages/feature-views/FeatureViewInstance.tsx new file mode 100644 index 0000000000..b0fa7c32b0 --- /dev/null +++ b/ui/src/pages/feature-views/FeatureViewInstance.tsx @@ -0,0 +1,53 @@ +import React from "react"; + +import { useParams } from "react-router-dom"; +import { EuiLoadingSpinner } from "@elastic/eui"; + +import { FeastFeatureViewType } from "../../parsers/feastFeatureViews"; +import RegularFeatureInstance from "./RegularFeatureViewInstance"; +import { FEAST_FV_TYPES } from "../../parsers/mergedFVTypes"; +import { FeastODFVType } from "../../parsers/feastODFVS"; +import useLoadFeatureView from "./useLoadFeatureView"; +import OnDemandFeatureInstance from "./OnDemandFeatureViewInstance"; + +const FeatureViewInstance = () => { + const { featureViewName } = useParams(); + + const fvName = featureViewName === undefined ? "" : featureViewName; + + const { isLoading, isSuccess, isError, data } = useLoadFeatureView(fvName); + const isEmpty = data === undefined; + + if (isLoading) { + return ( + + Loading + + ); + } + if (isEmpty) { + return

+      <p>No feature view with name: {featureViewName}</p>;
+  }
+
+  if (isError) {
+    return <p>Error loading feature view: {featureViewName}</p>
; + } + + if (isSuccess && !isEmpty) { + if (data.type === FEAST_FV_TYPES.regular) { + const fv: FeastFeatureViewType = data.object; + + return ; + } + + if (data.type === FEAST_FV_TYPES.ondemand) { + const odfv: FeastODFVType = data.object; + + return ; + } + } + + return

+    <p>No Data So Sad</p>
; +}; + +export default FeatureViewInstance; diff --git a/ui/src/pages/feature-views/FeatureViewListingTable.tsx b/ui/src/pages/feature-views/FeatureViewListingTable.tsx new file mode 100644 index 0000000000..59f8b1ed7a --- /dev/null +++ b/ui/src/pages/feature-views/FeatureViewListingTable.tsx @@ -0,0 +1,90 @@ +import React from "react"; +import { + EuiBasicTable, + EuiBadge, + EuiTableFieldDataColumnType, +} from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { genericFVType } from "../../parsers/mergedFVTypes"; +import { EuiTableComputedColumnType } from "@elastic/eui/src/components/basic_table"; +import { useParams } from "react-router-dom"; + +interface FeatureViewListingTableProps { + tagKeysSet: Set; + featureViews: genericFVType[]; +} + +type genericFVTypeColumn = + | EuiTableFieldDataColumnType + | EuiTableComputedColumnType; + +const FeatureViewListingTable = ({ + tagKeysSet, + featureViews, +}: FeatureViewListingTableProps) => { + const { projectName } = useParams(); + + const columns: genericFVTypeColumn[] = [ + { + name: "Name", + field: "name", + sortable: true, + render: (name: string, item: genericFVType) => { + return ( + + {name} {item.type === "ondemand" && ondemand} + + ); + }, + }, + { + name: "# of Features", + field: "features", + sortable: true, + render: (features: unknown[]) => { + return features.length; + }, + }, + ]; + + // Add columns if they come up in search + tagKeysSet.forEach((key) => { + columns.push({ + name: key, + render: (item: genericFVType) => { + let tag = n/a; + + if (item.type === "regular") { + const value = item.object.spec.tags + ? item.object.spec.tags[key] + : undefined; + + if (value) { + tag = {value}; + } + } + + return tag; + }, + }); + }); + + const getRowProps = (item: genericFVType) => { + return { + "data-test-subj": `row-${item.name}`, + }; + }; + + return ( + + ); +}; + +export default FeatureViewListingTable; diff --git a/ui/src/pages/feature-views/FeatureViewRawData.tsx b/ui/src/pages/feature-views/FeatureViewRawData.tsx new file mode 100644 index 0000000000..6bb7154813 --- /dev/null +++ b/ui/src/pages/feature-views/FeatureViewRawData.tsx @@ -0,0 +1,24 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadFeatureView from "./useLoadFeatureView"; + +const FeatureViewRawData = () => { + let { featureViewName } = useParams(); + + const fvName = featureViewName === undefined ? "" : featureViewName; + + const { isSuccess, data } = useLoadFeatureView(fvName); + + return isSuccess && data ? ( + +
+      <pre>{JSON.stringify(data, null, 2)}</pre>
+    </EuiPanel>
+ ) : ( + + No data so sad + + ); +}; + +export default FeatureViewRawData; diff --git a/ui/src/pages/feature-views/FeatureViewSummaryStatisticsTab.tsx b/ui/src/pages/feature-views/FeatureViewSummaryStatisticsTab.tsx new file mode 100644 index 0000000000..7371d4a73b --- /dev/null +++ b/ui/src/pages/feature-views/FeatureViewSummaryStatisticsTab.tsx @@ -0,0 +1,84 @@ +import React from "react"; + +import { EuiEmptyPrompt, EuiLoadingContent, EuiTitle } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadFeatureViewSummaryStatistics from "../../queries/useLoadFeatureViewSummaryStatistics"; +import { + NumericColumnSummaryStatisticType, + StringColumnSummaryStatisticType, +} from "../../parsers/featureViewSummaryStatistics"; +import NumericFeaturesTable from "../../components/NumericFeaturesTable"; + +interface ColumnsByGroup { + INT64?: NumericColumnSummaryStatisticType[]; + STRING?: StringColumnSummaryStatisticType[]; +} + +const FeatureViewSummaryStatisticsTab = () => { + let { featureViewName } = useParams(); + + if (!featureViewName) { + throw new Error("Unable to get Feature View Name"); + } + + const { isError, data } = + useLoadFeatureViewSummaryStatistics(featureViewName); + + if (isError) { + return ( + Error loading Statistics} + body={ +

+          <p>
+            There was an error loading statistics for{" "}
+            {featureViewName}. Please check that statistics have been
+            generated.
+          </p>
+ } + /> + ); + } + + if (data) { + const columnsByGroup = Object.entries( + data.columnsSummaryStatistics + ).reduce((memo, [key, columnStatistics]) => { + if (columnStatistics.valueType === "INT64") { + if (!memo["INT64"]) { + memo[columnStatistics.valueType] = [columnStatistics]; + } else { + memo["INT64"].push(columnStatistics); + } + } + + if (columnStatistics.valueType === "STRING") { + if (!memo["STRING"]) { + memo[columnStatistics.valueType] = [columnStatistics]; + } else { + memo["STRING"].push(columnStatistics); + } + } + + return memo; + }, {}); + + return ( + + {columnsByGroup["INT64"] && ( + <> + +

+          <EuiTitle size="xs">
+            <h3>Numeric Columns</h3>
+          </EuiTitle>
+          <NumericFeaturesTable data={columnsByGroup["INT64"]} />
+        </>
+      )}
+ ); + } + + return ; +}; + +export default FeatureViewSummaryStatisticsTab; diff --git a/ui/src/pages/feature-views/Index.tsx b/ui/src/pages/feature-views/Index.tsx new file mode 100644 index 0000000000..3abd42a22b --- /dev/null +++ b/ui/src/pages/feature-views/Index.tsx @@ -0,0 +1,178 @@ +import React, { useContext } from "react"; + +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, + EuiLoadingSpinner, + EuiSpacer, + EuiTitle, + EuiFieldSearch, + EuiFlexGroup, + EuiFlexItem, +} from "@elastic/eui"; + +import { FeatureViewIcon32 } from "../../graphics/FeatureViewIcon"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import FeatureViewListingTable from "./FeatureViewListingTable"; +import { + filterInputInterface, + useSearchQuery, + useTagsWithSuggestions, +} from "../../hooks/useSearchInputWithTags"; +import { genericFVType, regularFVInterface } from "../../parsers/mergedFVTypes"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import FeatureViewIndexEmptyState from "./FeatureViewIndexEmptyState"; +import { useFeatureViewTagsAggregation } from "../../hooks/useTagsAggregation"; +import TagSearch from "../../components/TagSearch"; + +const useLoadFeatureViews = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.mergedFVList; + + return { + ...registryQuery, + data, + }; +}; + +const shouldIncludeFVsGivenTokenGroups = ( + entry: regularFVInterface, + tagTokenGroups: Record +) => { + return Object.entries(tagTokenGroups).every(([key, values]) => { + const entryTagValue = entry.object.spec.tags + ? entry.object.spec.tags[key] + : undefined; + + if (entryTagValue) { + return values.every((value) => { + return value.length > 0 ? entryTagValue.indexOf(value) >= 0 : true; // Don't filter if the string is empty + }); + } else { + return false; + } + }); +}; + +const filterFn = (data: genericFVType[], filterInput: filterInputInterface) => { + let filteredByTags = data; + + if (Object.keys(filterInput.tagTokenGroups).length) { + filteredByTags = data.filter((entry) => { + if (entry.type === "regular") { + return shouldIncludeFVsGivenTokenGroups( + entry, + filterInput.tagTokenGroups + ); + } else { + return false; // ODFVs don't have tags yet + } + }); + } + + if (filterInput.searchTokens.length) { + return filteredByTags.filter((entry) => { + return filterInput.searchTokens.find((token) => { + return token.length >= 3 && entry.name.indexOf(token) >= 0; + }); + }); + } + + return filteredByTags; +}; + +const Index = () => { + const { isLoading, isSuccess, isError, data } = useLoadFeatureViews(); + const tagAggregationQuery = useFeatureViewTagsAggregation(); + + useDocumentTitle(`Feature Views | Feast`); + + const { searchString, searchTokens, setSearchString } = useSearchQuery(); + + const { + currentTag, + tagsString, + tagTokenGroups, + tagKeysSet, + tagSuggestions, + suggestionMode, + setTagsString, + acceptSuggestion, + setCursorPosition, + } = useTagsWithSuggestions(tagAggregationQuery.data); + + const filterResult = data + ? filterFn(data, { tagTokenGroups, searchTokens }) + : data; + + return ( + + + + + {isLoading && ( +

+          <p>
+            <EuiLoadingSpinner size="m" /> Loading
+          </p>
+        )}
+        {isError && <p>We encountered an error while loading.</p>
} + {isSuccess && data?.length === 0 && } + {isSuccess && data && data.length > 0 && filterResult && ( + + + + +

+                <EuiFlexItem grow={1}>
+                  <EuiTitle size="xs">
+                    <h4>Search</h4>
+                  </EuiTitle>
+                  <EuiFieldSearch
+                    value={searchString}
+                    fullWidth={true}
+                    onChange={(e) => {
+                      setSearchString(e.target.value);
+                    }}
+                  />
+                </EuiFlexItem>
+                <EuiFlexItem grow={2}>
+                  <TagSearch
+                    currentTag={currentTag}
+                    tagsString={tagsString}
+                    setTagsString={setTagsString}
+                    acceptSuggestion={acceptSuggestion}
+                    tagSuggestions={tagSuggestions}
+                    suggestionMode={suggestionMode}
+                    setCursorPosition={setCursorPosition}
+                  />
+                </EuiFlexItem>
+              </EuiFlexGroup>
+              <EuiSpacer size="m" />
+              <FeatureViewListingTable
+                featureViews={filterResult}
+                tagKeysSet={tagKeysSet}
+              />
+ )} +
+      </EuiPageContentBody>
+    </EuiPageContent>
+ ); +}; + +export default Index; diff --git a/ui/src/pages/feature-views/OnDemandFeatureViewInstance.tsx b/ui/src/pages/feature-views/OnDemandFeatureViewInstance.tsx new file mode 100644 index 0000000000..1db2f5194f --- /dev/null +++ b/ui/src/pages/feature-views/OnDemandFeatureViewInstance.tsx @@ -0,0 +1,69 @@ +import React from "react"; +import { Route, Routes, useNavigate } from "react-router-dom"; +import { useParams } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { FeatureViewIcon32 } from "../../graphics/FeatureViewIcon"; +import { useMatchExact } from "../../hooks/useMatchSubpath"; +import { FeastODFVType } from "../../parsers/feastODFVS"; +import OnDemandFeatureViewOverviewTab from "./OnDemandFeatureViewOverviewTab"; + +import { + useOnDemandFeatureViewCustomTabs, + useOnDemandFeatureViewCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; + +interface OnDemandFeatureInstanceProps { + data: FeastODFVType; +} + +const OnDemandFeatureInstance = ({ data }: OnDemandFeatureInstanceProps) => { + const navigate = useNavigate(); + let { featureViewName } = useParams(); + + const { customNavigationTabs } = useOnDemandFeatureViewCustomTabs(navigate); + const CustomTabRoutes = useOnDemandFeatureViewCustomTabRoutes(); + + return ( + + { + navigate(""); + }, + }, + ...customNavigationTabs, + ]} + /> + + + + } + /> + {CustomTabRoutes} + + + + + ); +}; + +export default OnDemandFeatureInstance; diff --git a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx new file mode 100644 index 0000000000..9bd725534e --- /dev/null +++ b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx @@ -0,0 +1,140 @@ +import { + EuiFlexGroup, + EuiFlexItem, + EuiHorizontalRule, + EuiText, + EuiTitle, + EuiPanel, + EuiCodeBlock, + EuiSpacer, +} from "@elastic/eui"; +import React from "react"; +import FeaturesListDisplay from "../../components/FeaturesListDisplay"; +import { + FeastODFVType, + RequestDataSourceType, + FeatureViewProjectionType, +} from "../../parsers/feastODFVS"; +import { EntityRelation } from "../../parsers/parseEntityRelationships"; +import { FEAST_FCO_TYPES } from "../../parsers/types"; +import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; +import FeatureViewProjectionDisplayPanel from "./components/FeatureViewProjectionDisplayPanel"; +import RequestDataDisplayPanel from "./components/RequestDataDisplayPanel"; +import ConsumingFeatureServicesList from "./ConsumingFeatureServicesList"; + +interface OnDemandFeatureViewOverviewTabProps { + data: FeastODFVType; +} + +const whereFSconsumesThisFv = (fvName: string) => { + return (r: EntityRelation) => { + return ( + r.source.name === fvName && + r.target.type === FEAST_FCO_TYPES.featureService + ); + }; +}; + +const OnDemandFeatureViewOverviewTab = ({ + data, +}: OnDemandFeatureViewOverviewTabProps) => { + const inputs = Object.entries(data.spec.inputs); + + const relationshipQuery = useLoadRelationshipData(); + const fsNames = relationshipQuery.data + ? relationshipQuery.data + .filter(whereFSconsumesThisFv(data.spec.name)) + .map((fs) => { + return fs.target.name; + }) + : []; + + return ( + + + + + +

+            <EuiTitle size="xs">
+              <h3>Transformation</h3>
+            </EuiTitle>
+            <EuiHorizontalRule margin="xs" />
+            <EuiCodeBlock language="python" fontSize="m" paddingSize="m">
+              {data.spec.userDefinedFunction.body}
+            </EuiCodeBlock>
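+            {/* Editor's assumption: on-demand feature view user-defined
+                functions are Python, hence language="python" on the code
+                block above. */}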
+
+
+ + + + +

+            <EuiTitle size="xs">
+              <h3>Features ({data.spec.features.length})</h3>
+            </EuiTitle>
+            <EuiHorizontalRule margin="xs" />
+            {data.spec.features ? (
+              <FeaturesListDisplay features={data.spec.features} />
+            ) : (
+              <EuiText>No features specified on this feature view.</EuiText>
+            )}
+
+ + + +

+            <EuiTitle size="xs">
+              <h3>Inputs ({inputs.length})</h3>
+            </EuiTitle>
+            <EuiHorizontalRule margin="xs" />
+            <EuiFlexGroup direction="column">
+              {inputs.map(([key, inputGroup]) => {
+                if ((inputGroup as RequestDataSourceType).requestDataSource) {
+                  return (
+                    <EuiFlexItem key={key}>
+                      <RequestDataDisplayPanel
+                        {...(inputGroup as RequestDataSourceType)}
+                      />
+                    </EuiFlexItem>
+                  );
+                }
+
+                if (inputGroup as FeatureViewProjectionType) {
+                  return (
+                    <EuiFlexItem key={key}>
+                      <FeatureViewProjectionDisplayPanel
+                        {...(inputGroup as FeatureViewProjectionType)}
+                      />
+                    </EuiFlexItem>
+                  );
+                }
+
+                return (
+                  <EuiFlexItem key={key}>
+                    <EuiCodeBlock language="json">
+                      {JSON.stringify(inputGroup, null, 2)}
+                    </EuiCodeBlock>
+                  </EuiFlexItem>
+                );
+              })}
+            </EuiFlexGroup>
+ + + +

+            <EuiTitle size="xs">
+              <h3>Consuming Feature Services</h3>
+            </EuiTitle>
+            <EuiHorizontalRule margin="xs" />
+            {fsNames.length > 0 ? (
+              <ConsumingFeatureServicesList fsNames={fsNames} />
+            ) : (
+              <EuiText>No services consume this feature view</EuiText>
+            )}
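+            {/* Editor's note: fsNames is computed above by filtering
+                relationship edges where this feature view is the source and a
+                feature service is the target (whereFSconsumesThisFv). */}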
+
+
+
+ ); +}; + +export default OnDemandFeatureViewOverviewTab; diff --git a/ui/src/pages/feature-views/RegularFeatureViewInstance.tsx b/ui/src/pages/feature-views/RegularFeatureViewInstance.tsx new file mode 100644 index 0000000000..7200163614 --- /dev/null +++ b/ui/src/pages/feature-views/RegularFeatureViewInstance.tsx @@ -0,0 +1,89 @@ +import React, { useContext } from "react"; +import { Route, Routes, useNavigate } from "react-router-dom"; +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, +} from "@elastic/eui"; + +import { FeatureViewIcon32 } from "../../graphics/FeatureViewIcon"; + +import { useMatchExact, useMatchSubpath } from "../../hooks/useMatchSubpath"; +import { FeastFeatureViewType } from "../../parsers/feastFeatureViews"; +import RegularFeatureViewOverviewTab from "./RegularFeatureViewOverviewTab"; +import FeatureViewSummaryStatisticsTab from "./FeatureViewSummaryStatisticsTab"; + +import { + useRegularFeatureViewCustomTabs, + useRegularFeatureViewCustomTabRoutes, +} from "../../custom-tabs/TabsRegistryContext"; +import FeatureFlagsContext from "../../contexts/FeatureFlagsContext"; + +interface RegularFeatureInstanceProps { + data: FeastFeatureViewType; +} + +const RegularFeatureInstance = ({ data }: RegularFeatureInstanceProps) => { + const { enabledFeatureStatistics } = useContext(FeatureFlagsContext); + const navigate = useNavigate(); + + const { customNavigationTabs } = useRegularFeatureViewCustomTabs(navigate); + let tabs = [ + { + label: "Overview", + isSelected: useMatchExact(""), + onClick: () => { + navigate(""); + }, + }, + ]; + + let statisticsIsSelected = useMatchSubpath("statistics"); + if (enabledFeatureStatistics) { + tabs.push({ + label: "Statistics", + isSelected: statisticsIsSelected, + onClick: () => { + navigate("statistics"); + }, + }); + } + + tabs.push(...customNavigationTabs); + + const TabRoutes = useRegularFeatureViewCustomTabRoutes(); + + return ( + + + + + + } + /> + } + /> + {TabRoutes} + + + + + ); +}; + +export default RegularFeatureInstance; diff --git a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx new file mode 100644 index 0000000000..72ea646c95 --- /dev/null +++ b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx @@ -0,0 +1,198 @@ +import { + EuiBadge, + EuiFlexGroup, + EuiFlexItem, + EuiHorizontalRule, + EuiPanel, + EuiSpacer, + EuiStat, + EuiText, + EuiTitle, +} from "@elastic/eui"; +import React from "react"; + +import { useNavigate, useParams } from "react-router-dom"; +import FeaturesListDisplay from "../../components/FeaturesListDisplay"; +import TagsDisplay from "../../components/TagsDisplay"; +import { encodeSearchQueryString } from "../../hooks/encodeSearchQueryString"; +import { FeastFeatureViewType } from "../../parsers/feastFeatureViews"; +import { EntityRelation } from "../../parsers/parseEntityRelationships"; +import { FEAST_FCO_TYPES } from "../../parsers/types"; +import useLoadRelationshipData from "../../queries/useLoadRelationshipsData"; +import BatchSourcePropertiesView from "../data-sources/BatchSourcePropertiesView"; +import ConsumingFeatureServicesList from "./ConsumingFeatureServicesList"; + +const whereFSconsumesThisFv = (fvName: string) => { + return (r: EntityRelation) => { + return ( + r.source.name === fvName && + r.target.type === FEAST_FCO_TYPES.featureService + ); + }; +}; + +interface RegularFeatureViewOverviewTabProps { + data: FeastFeatureViewType; +} + +const RegularFeatureViewOverviewTab = ({ + data, +}: 
RegularFeatureViewOverviewTabProps) => { + const navigate = useNavigate(); + + const { projectName } = useParams(); + const { featureViewName } = useParams(); + + const fvName = featureViewName === undefined ? "" : featureViewName; + + const relationshipQuery = useLoadRelationshipData(); + + const fsNames = relationshipQuery.data + ? relationshipQuery.data.filter(whereFSconsumesThisFv(fvName)).map((fs) => { + return fs.target.name; + }) + : []; + const numOfFs = fsNames.length; + + return ( + + + + + + {data.spec.batchSource.meta ? ( + + + + ) : ( + No batchSource specified on this feature view. + )} + {data.meta.lastUpdatedTimestamp && ( + + + + )} + + + + + + +

+              <EuiTitle size="xs">
+                <h3>Features ({data.spec.features.length})</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.features ? (
+                <FeaturesListDisplay features={data.spec.features} />
+              ) : (
+                <EuiText>No features specified on this feature view.</EuiText>
+              )}
+
+ + + +

+              <EuiTitle size="xs">
+                <h3>Entities</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {data.spec.entities ? (
+                <EuiFlexGroup wrap>
+                  {data.spec.entities.map((entity) => {
+                    return (
+                      <EuiFlexItem grow={false} key={entity}>
+                        <EuiBadge
+                          color="primary"
+                          onClick={() => {
+                            navigate(`/p/${projectName}/entity/${entity}`);
+                          }}
+                          onClickAriaLabel={entity}
+                          data-test-sub="testExample1"
+                        >
+                          {entity}
+                        </EuiBadge>
+                      </EuiFlexItem>
+                    );
+                  })}
+                </EuiFlexGroup>
+              ) : (
+                <EuiText>No Entities.</EuiText>
+              )}
+ + + +

+              <EuiTitle size="xs">
+                <h3>Consuming Feature Services</h3>
+              </EuiTitle>
+              <EuiHorizontalRule margin="xs" />
+              {fsNames.length > 0 ? (
+                <ConsumingFeatureServicesList fsNames={fsNames} />
+              ) : (
+                <EuiText>No services consume this feature view</EuiText>
+              )}
+ + + +

Tags

+
+ + {data.spec.tags ? ( + { + return ( + `/p/${projectName}/feature-view?` + + encodeSearchQueryString(`${key}:${value}`) + ); + }} + /> + ) : ( + No Tags specified on this feature view. + )} +
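+ {/* createLink turns each tag into a pre-filled feature-view search, e.g. /p/<project>/feature-view?<url-encoded "key:value">. */}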
+
+
+ + + + + +

Batch Source

+
+ + +
+
+
+ + + +

Materialization Intervals

+
+ + {data.meta.materializationIntervals?.map((interval, i) => { + return ( +

+ {interval.startTime.toLocaleDateString("en-CA")} to{" "} + {interval.endTime.toLocaleDateString("en-CA")} +
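+ {/* The "en-CA" locale renders dates as YYYY-MM-DD, giving ISO-style interval bounds. */}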

+ ); + })} +
+
+
+ ); +}; + +export default RegularFeatureViewOverviewTab; diff --git a/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx b/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx new file mode 100644 index 0000000000..7b110f326d --- /dev/null +++ b/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx @@ -0,0 +1,47 @@ +import React from "react"; +import { EuiBasicTable, EuiPanel, EuiText, EuiTitle } from "@elastic/eui"; + +import { FeatureViewProjectionType } from "../../../parsers/feastODFVS"; +import { useParams } from "react-router-dom"; +import EuiCustomLink from "../../../components/EuiCustomLink"; + +interface RequestDataDisplayPanelProps extends FeatureViewProjectionType {} + +const FeatureViewProjectionDisplayPanel = ({ + featureViewProjection, +}: RequestDataDisplayPanelProps) => { + const { projectName } = useParams(); + + const columns = [ + { + name: "Column Name", + field: "name", + }, + { + name: "Type", + field: "valueType", + }, + ]; + + return ( + + + Feature View + + + + {featureViewProjection.featureViewName} + + + + + ); +}; + +export default FeatureViewProjectionDisplayPanel; diff --git a/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx b/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx new file mode 100644 index 0000000000..a6e546d9d8 --- /dev/null +++ b/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx @@ -0,0 +1,52 @@ +import React from "react"; +import { EuiBasicTable, EuiPanel, EuiText, EuiTitle } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import { RequestDataSourceType } from "../../../parsers/feastODFVS"; +import EuiCustomLink from "../../../components/EuiCustomLink"; + +interface RequestDataDisplayPanelProps extends RequestDataSourceType {} + +const RequestDataDisplayPanel = ({ + requestDataSource, +}: RequestDataDisplayPanelProps) => { + const { projectName } = useParams(); + + const items = Object.entries(requestDataSource.requestDataOptions.schema).map( + ([key, type]) => { + return { + key, + type, + }; + } + ); + + const columns = [ + { + name: "Key", + field: "key", + }, + { + name: "Type", + field: "type", + }, + ]; + + return ( + + + Request Data + + + + {requestDataSource.name} + + + + + ); +}; + +export default RequestDataDisplayPanel; diff --git a/ui/src/pages/feature-views/useLoadFeatureView.ts b/ui/src/pages/feature-views/useLoadFeatureView.ts new file mode 100644 index 0000000000..ded7900ea9 --- /dev/null +++ b/ui/src/pages/feature-views/useLoadFeatureView.ts @@ -0,0 +1,55 @@ +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import useLoadRegistry from "../../queries/useLoadRegistry"; + +const useLoadFeatureView = (featureViewName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.mergedFVMap[featureViewName]; + + return { + ...registryQuery, + data, + }; +}; + +const useLoadRegularFeatureView = (featureViewName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? 
undefined + : registryQuery.data.objects.featureViews?.find((fv) => { + return fv.spec.name === featureViewName; + }); + + return { + ...registryQuery, + data, + }; +}; + +const useLoadOnDemandFeatureView = (featureViewName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.onDemandFeatureViews?.find((fv) => { + return fv.spec.name === featureViewName; + }); + + return { + ...registryQuery, + data, + }; +}; + +export default useLoadFeatureView; +export { useLoadRegularFeatureView, useLoadOnDemandFeatureView }; diff --git a/ui/src/pages/saved-data-sets/DatasetExpectationsTab.tsx b/ui/src/pages/saved-data-sets/DatasetExpectationsTab.tsx new file mode 100644 index 0000000000..10ebb87297 --- /dev/null +++ b/ui/src/pages/saved-data-sets/DatasetExpectationsTab.tsx @@ -0,0 +1,41 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadDataset from "./useLoadDataset"; + +const DatasetExpectationsTab = () => { + let { datasetName } = useParams(); + + if (!datasetName) { + throw new Error("Unable to get dataset name."); + } + const { isSuccess, data } = useLoadDataset(datasetName); + + if (!data || !data.spec.profile) { + return ( + + No data so sad + + ); + } + + let expectationsData; + + try { + expectationsData = JSON.parse(data.spec.profile); + } catch (e) { + throw new Error(`Unable to parse Expectations Profile: ${e}`); + } + + return isSuccess && expectationsData ? ( + +
{JSON.stringify(expectationsData, null, 2)}
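+ {/* The null replacer and indent of 2 pretty-print the parsed expectations profile. */}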
+
+ ) : (
+
+ No data so sad
+
+ );
+};
+
+export default DatasetExpectationsTab; diff --git a/ui/src/pages/saved-data-sets/DatasetFeaturesTable.tsx b/ui/src/pages/saved-data-sets/DatasetFeaturesTable.tsx
new file mode 100644
index 0000000000..6f670f37f5
--- /dev/null
+++ b/ui/src/pages/saved-data-sets/DatasetFeaturesTable.tsx
@@ -0,0 +1,28 @@
+import { EuiBasicTable } from "@elastic/eui";
+import React from "react";
+
+interface DatasetFeatureEntry {
+ featureName: string;
+ featureViewName: string;
+}
+
+interface DatasetFeaturesTableProps {
+ features: DatasetFeatureEntry[];
+}
+
+const DatasetFeaturesTable = ({ features }: DatasetFeaturesTableProps) => {
+ const columns = [
+ {
+ name: "Feature",
+ field: "featureName",
+ },
+ {
+ name: "Source Feature View",
+ field: "featureViewName",
+ },
+ ];
+
+ return ;
+};
+
+export default DatasetFeaturesTable; diff --git a/ui/src/pages/saved-data-sets/DatasetInstance.tsx b/ui/src/pages/saved-data-sets/DatasetInstance.tsx
new file mode 100644
index 0000000000..26df0b3eb3
--- /dev/null
+++ b/ui/src/pages/saved-data-sets/DatasetInstance.tsx
@@ -0,0 +1,72 @@
+import React from "react";
+import { Route, Routes, useNavigate, useParams } from "react-router-dom";
+import {
+ EuiPageHeader,
+ EuiPageContent,
+ EuiPageContentBody,
+} from "@elastic/eui";
+
+import { DatasetIcon32 } from "../../graphics/DatasetIcon";
+
+import { useMatchExact, useMatchSubpath } from "../../hooks/useMatchSubpath";
+import DatasetOverviewTab from "./DatasetOverviewTab";
+import { useDocumentTitle } from "../../hooks/useDocumentTitle";
+import DatasetExpectationsTab from "./DatasetExpectationsTab";
+import {
+ useDatasetCustomTabs,
+ useDataSourceCustomTabRoutes,
+} from "../../custom-tabs/TabsRegistryContext";
+
+const DatasetInstance = () => {
+ const navigate = useNavigate();
+ let { datasetName } = useParams();
+
+ useDocumentTitle(`${datasetName} | Saved Datasets | Feast`);
+
+ const { customNavigationTabs } = useDatasetCustomTabs(navigate);
+ const CustomTabRoutes = useDataSourceCustomTabRoutes();
+
+ return (
+
+ {
+ navigate("");
+ },
+ },
+ {
+ label: "Expectations",
+ isSelected: useMatchSubpath("expectations"),
+ onClick: () => {
+ navigate("expectations");
+ },
+ },
+ ...customNavigationTabs,
+ ]}
+ />
+
+
+
+ } />
+ } />
+ {CustomTabRoutes}
+
+
+
+
+ );
+};
+
+export default DatasetInstance; diff --git a/ui/src/pages/saved-data-sets/DatasetJoinKeysTable.tsx b/ui/src/pages/saved-data-sets/DatasetJoinKeysTable.tsx
new file mode 100644
index 0000000000..e820691614
--- /dev/null
+++ b/ui/src/pages/saved-data-sets/DatasetJoinKeysTable.tsx
@@ -0,0 +1,23 @@
+import { EuiBasicTable } from "@elastic/eui";
+import React from "react";
+
+interface DatasetJoinKey {
+ name: string;
+}
+
+interface DatasetJoinKeysTableProps {
+ joinKeys: DatasetJoinKey[];
+}
+
+const DatasetJoinKeysTable = ({ joinKeys }: DatasetJoinKeysTableProps) => {
+ const columns = [
+ {
+ name: "Name",
+ field: "name",
+ },
+ ];
+
+ return ;
+};
+
+export default DatasetJoinKeysTable; diff --git a/ui/src/pages/saved-data-sets/DatasetOverviewTab.tsx b/ui/src/pages/saved-data-sets/DatasetOverviewTab.tsx
new file mode 100644
index 0000000000..a20c83b1e2
--- /dev/null
+++ b/ui/src/pages/saved-data-sets/DatasetOverviewTab.tsx
@@ -0,0 +1,105 @@
+import {
+ EuiFlexGroup,
+ EuiHorizontalRule,
+ EuiLoadingSpinner,
+ EuiTitle,
+ EuiPanel,
+ EuiFlexItem,
+ EuiSpacer,
+ EuiDescriptionList,
+ EuiDescriptionListTitle,
+ EuiDescriptionListDescription,
+} from "@elastic/eui";
+import React from "react";
+import { 
useParams } from "react-router-dom"; +import DatasetFeaturesTable from "./DatasetFeaturesTable"; +import DatasetJoinKeysTable from "./DatasetJoinKeysTable"; +import useLoadDataset from "./useLoadDataset"; + +const EntityOverviewTab = () => { + let { datasetName } = useParams(); + + if (!datasetName) { + throw new Error( + "Route doesn't have a 'datasetName' part. This route is likely rendering the wrong component." + ); + } + + const { isLoading, isSuccess, isError, data } = useLoadDataset(datasetName); + const isEmpty = data === undefined; + + return ( + + {isLoading && ( + + Loading + + )} + {isEmpty &&

No dataset with name: {datasetName}

} + {isError &&

Error loading dataset: {datasetName}

} + {isSuccess && data && ( + + + + + +

Features

+
+ + { + const [featureViewName, featureName] = + joinedName.split(":"); + + return { + featureViewName, + featureName, + }; + })} + /> +
+ + + +

Join Keys

+
+ + { + return { name: joinKey }; + })} + /> +
+
+ + + +

Properties

+
+ + + + Source Feature Service + + + {data.spec.featureService} + + +
+ + + + Created + + {data.meta.createdTimestamp.toLocaleDateString("en-CA")} + + + +
+
+
+ )} +
+ ); +}; +export default EntityOverviewTab; diff --git a/ui/src/pages/saved-data-sets/DatasetRawData.tsx b/ui/src/pages/saved-data-sets/DatasetRawData.tsx new file mode 100644 index 0000000000..9f335800e9 --- /dev/null +++ b/ui/src/pages/saved-data-sets/DatasetRawData.tsx @@ -0,0 +1,25 @@ +import React from "react"; +import { EuiPanel } from "@elastic/eui"; +import { useParams } from "react-router-dom"; +import useLoadDataset from "./useLoadDataset"; + +const EntityRawData = () => { + let { datasetName } = useParams(); + + if (!datasetName) { + throw new Error("Unable to get dataset name."); + } + const { isSuccess, data } = useLoadDataset(datasetName); + + return isSuccess && data ? ( + +
{JSON.stringify(data, null, 2)}
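+ {/* Date fields produced by the saved-dataset schema's zod transforms serialize back to ISO strings here. */}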
+
+ ) : ( + + No data so sad + + ); +}; + +export default EntityRawData; diff --git a/ui/src/pages/saved-data-sets/DatasetsIndexEmptyState.tsx b/ui/src/pages/saved-data-sets/DatasetsIndexEmptyState.tsx new file mode 100644 index 0000000000..44f56ba2dd --- /dev/null +++ b/ui/src/pages/saved-data-sets/DatasetsIndexEmptyState.tsx @@ -0,0 +1,42 @@ +import React from "react"; +import { EuiEmptyPrompt, EuiTitle, EuiLink, EuiButton } from "@elastic/eui"; +import FeastIconBlue from "../../graphics/FeastIconBlue"; + +const DatasetsIndexEmptyState = () => { + return ( + There are no saved datasets} + body={ +

+ You currently do not have any saved datasets. Learn more about + creating saved datasets in Feast Docs. +

+ } + actions={ + { + window.open( + "https://docs.feast.dev/getting-started/concepts/dataset#creating-saved-dataset-from-historical-retrieval", + "_blank" + ); + }} + > + Open Dataset Docs + + } + footer={ + <> + +

Want to learn more?

+
+ + Read Feast documentation + + + } + /> + ); +}; + +export default DatasetsIndexEmptyState; diff --git a/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx b/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx new file mode 100644 index 0000000000..97d11b0b24 --- /dev/null +++ b/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx @@ -0,0 +1,53 @@ +import React from "react"; +import { EuiBasicTable } from "@elastic/eui"; +import EuiCustomLink from "../../components/EuiCustomLink"; +import { useParams } from "react-router-dom"; +import { FeastSavedDatasetType } from "../../parsers/feastSavedDataset"; + +interface DatasetsListingTableProps { + datasets: FeastSavedDatasetType[]; +} + +const DatasetsListingTable = ({ datasets }: DatasetsListingTableProps) => { + const { projectName } = useParams(); + + const columns = [ + { + name: "Name", + field: "spec.name", + sortable: true, + render: (name: string) => { + return ( + + {name} + + ); + }, + }, + { + name: "Source Feature Service", + field: "spec.featureService", + }, + { + name: "Created", + render: (item: FeastSavedDatasetType) => { + return item.meta.createdTimestamp.toLocaleDateString("en-CA"); + }, + }, + ]; + + const getRowProps = (item: FeastSavedDatasetType) => { + return { + "data-test-subj": `row-${item.spec.name}`, + }; + }; + + return ( + + ); +}; + +export default DatasetsListingTable; diff --git a/ui/src/pages/saved-data-sets/Index.tsx b/ui/src/pages/saved-data-sets/Index.tsx new file mode 100644 index 0000000000..5f20621baf --- /dev/null +++ b/ui/src/pages/saved-data-sets/Index.tsx @@ -0,0 +1,67 @@ +import React, { useContext } from "react"; + +import { + EuiPageHeader, + EuiPageContent, + EuiPageContentBody, + EuiLoadingSpinner, +} from "@elastic/eui"; + +import { DatasetIcon32 } from "../../graphics/DatasetIcon"; + +import useLoadRegistry from "../../queries/useLoadRegistry"; +import { useDocumentTitle } from "../../hooks/useDocumentTitle"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import DatasetsListingTable from "./DatasetsListingTable"; +import DatasetsIndexEmptyState from "./DatasetsIndexEmptyState"; + +const useLoadSavedDataSets = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.savedDatasets; + + return { + ...registryQuery, + data, + }; +}; + +const Index = () => { + const { isLoading, isSuccess, isError, data } = useLoadSavedDataSets(); + + useDocumentTitle(`Saved Datasets | Feast`); + + return ( + + + + + {isLoading && ( +

+ Loading +

+ )} + {isError &&

We encountered an error while loading.

} + {isSuccess && data && } + {isSuccess && !data && } +
+
+
+ ); +}; + +export default Index; diff --git a/ui/src/pages/saved-data-sets/useLoadDataset.ts b/ui/src/pages/saved-data-sets/useLoadDataset.ts new file mode 100644 index 0000000000..a3dbd3225d --- /dev/null +++ b/ui/src/pages/saved-data-sets/useLoadDataset.ts @@ -0,0 +1,22 @@ +import { useContext } from "react"; +import RegistryPathContext from "../../contexts/RegistryPathContext"; +import useLoadRegistry from "../../queries/useLoadRegistry"; + +const useLoadEntity = (entityName: string) => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? undefined + : registryQuery.data.objects.savedDatasets?.find( + (fv) => fv.spec.name === entityName + ); + + return { + ...registryQuery, + data, + }; +}; + +export default useLoadEntity; diff --git a/ui/src/parsers/feastDatasources.ts b/ui/src/parsers/feastDatasources.ts new file mode 100644 index 0000000000..a9f58d716c --- /dev/null +++ b/ui/src/parsers/feastDatasources.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; + +const FeastDatasourceSchema = z.object({ + type: z.string(), + eventTimestampColumn: z.string().optional(), + createdTimestampColumn: z.string().optional(), + fileOptions: z.object({ + fileUrl: z.string().optional(), + }).optional(), + name: z.string(), + meta: z.object({ + latestEventTimestamp: z.string().transform((val) => new Date(val)), + earliestEventTimestamp: z.string().transform((val) => new Date(val)), + }).optional(), + requestDataOptions: z.object({ + schema: z.record(z.nativeEnum(FEAST_FEATURE_VALUE_TYPES)), + }).optional(), + bigqueryOptions: z.object({ + tableRef: z.string().optional(), + dbtModelSerialized: z.string().optional() + }).optional(), +}); + +type FeastDatasourceType = z.infer; + +export { FeastDatasourceSchema }; +export type { FeastDatasourceType }; diff --git a/ui/src/parsers/feastEntities.ts b/ui/src/parsers/feastEntities.ts new file mode 100644 index 0000000000..3dddb8dca1 --- /dev/null +++ b/ui/src/parsers/feastEntities.ts @@ -0,0 +1,21 @@ +import { z } from "zod"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; + +const FeastEntitySchema = z.object({ + spec: z.object({ + name: z.string(), + valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), + joinKey: z.string(), + description: z.string().optional(), + labels: z.record(z.string()).optional(), + }), + meta: z.object({ + createdTimestamp: z.string().transform((val) => new Date(val)).optional(), + lastUpdatedTimestamp: z.string().transform((val) => new Date(val)).optional(), + }), +}); + +type FeastEntityType = z.infer; + +export { FeastEntitySchema }; +export type { FeastEntityType }; diff --git a/ui/src/parsers/feastFeatureServices.ts b/ui/src/parsers/feastFeatureServices.ts new file mode 100644 index 0000000000..96c03e38ef --- /dev/null +++ b/ui/src/parsers/feastFeatureServices.ts @@ -0,0 +1,30 @@ +import { z } from "zod"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; + +const FeatureColumnInService = z.object({ + name: z.string(), + valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), +}); + +const FeatureInServiceSchema = z.object({ + featureViewName: z.string(), + featureColumns: z.array(FeatureColumnInService), +}); + +const FeastFeatureServiceSchema = z.object({ + spec: z.object({ + name: z.string(), + features: z.array(FeatureInServiceSchema), + tags: z.record(z.string()).optional(), + description: z.string().optional(), + }), + meta: z.object({ + createdTimestamp: 
z.string().transform((val) => new Date(val)), + }), +}); + +type FeastFeatureServiceType = z.infer; +type FeastFeatureInServiceType = z.infer; + +export { FeastFeatureServiceSchema }; +export type { FeastFeatureServiceType, FeastFeatureInServiceType }; diff --git a/ui/src/parsers/feastFeatureViews.ts b/ui/src/parsers/feastFeatureViews.ts new file mode 100644 index 0000000000..c8cdadd25c --- /dev/null +++ b/ui/src/parsers/feastFeatureViews.ts @@ -0,0 +1,59 @@ +import { z } from "zod"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; + +const FeastFeatureColumnSchema = z.object({ + name: z.string(), + valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), +}); + +const FeastBatchSourceSchema = z.object({ + type: z.string(), + eventTimestampColumn: z.string().optional(), + createdTimestampColumn: z.string().optional(), + fileOptions: z.object({ + fileUrl: z.string().optional(), + }).optional(), + name: z.string().optional(), + meta: z.object({ + earliestEventTimestamp: z.string().transform((val) => new Date(val)), + latestEventTimestamp: z.string().transform((val) => new Date(val)), + }).optional(), + requestDataOptions: z.object({ + schema: z.record(z.nativeEnum(FEAST_FEATURE_VALUE_TYPES)), + }).optional(), + bigqueryOptions: z.object({ + tableRef: z.string().optional(), + dbtModelSerialized: z.string().optional() + }).optional(), + dataSourceClassType: z.string(), +}); + +const FeastFeatureViewSchema = z.object({ + spec: z.object({ + name: z.string(), + entities: z.array(z.string()), + features: z.array(FeastFeatureColumnSchema), + ttl: z.string().transform((val) => parseInt(val)), + batchSource: FeastBatchSourceSchema, + online: z.boolean(), + tags: z.record(z.string()).optional(), + }), + meta: z.object({ + createdTimestamp: z.string().transform((val) => new Date(val)).optional(), + lastUpdatedTimestamp: z.string().transform((val) => new Date(val)).optional(), + materializationIntervals: z + .array( + z.object({ + startTime: z.string().transform((val) => new Date(val)), + endTime: z.string().transform((val) => new Date(val)), + }) + ) + .optional(), + }), +}); + +type FeastFeatureViewType = z.infer; +type FeastFeatureColumnType = z.infer; + +export { FeastFeatureViewSchema, FeastFeatureColumnSchema }; +export type { FeastFeatureViewType, FeastFeatureColumnType }; diff --git a/ui/src/parsers/feastODFVS.ts b/ui/src/parsers/feastODFVS.ts new file mode 100644 index 0000000000..ebac09e163 --- /dev/null +++ b/ui/src/parsers/feastODFVS.ts @@ -0,0 +1,48 @@ +import { z } from "zod"; +import { FeastFeatureColumnSchema } from "./feastFeatureViews"; +import { FEAST_FEATURE_VALUE_TYPES } from "./types"; + +const FeatureViewProjectionSchema = z.object({ + featureViewProjection: z.object({ + featureViewName: z.string(), + featureColumns: z.array(FeastFeatureColumnSchema), + }), +}); + +const RequestDataSourceSchema = z.object({ + requestDataSource: z.object({ + type: z.string(), + name: z.string(), + requestDataOptions: z.object({ + schema: z.record(z.nativeEnum(FEAST_FEATURE_VALUE_TYPES)), + }), + }), +}); + +const ODFVInputsSchema = z.union([ + FeatureViewProjectionSchema, + RequestDataSourceSchema, +]); + +const FeastODFVSchema = z.object({ + spec: z.object({ + name: z.string(), + features: z.array(FeastFeatureColumnSchema), + inputs: z.record(ODFVInputsSchema), + userDefinedFunction: z.object({ + name: z.string(), + body: z.string(), + }), + }), + meta: z.object({ + createdTimestamp: z.string().transform((val) => new Date(val)), + lastUpdatedTimestamp: z.string().transform((val) => new Date(val)), 
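+ // The transforms above run after validation, so the registry dump's
+ // ISO-8601 timestamp strings come out as Date objects. A value like
+ // "2022-05-01T00:00:00Z" is illustrative, not taken from a real registry.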
+ }),
+});
+
+type FeastODFVType = z.infer;
+type RequestDataSourceType = z.infer;
+type FeatureViewProjectionType = z.infer;
+
+export { FeastODFVSchema };
+export type { FeastODFVType, RequestDataSourceType, FeatureViewProjectionType }; diff --git a/ui/src/parsers/feastRegistry.ts b/ui/src/parsers/feastRegistry.ts
new file mode 100644
index 0000000000..98e4fccca2
--- /dev/null
+++ b/ui/src/parsers/feastRegistry.ts
@@ -0,0 +1,22 @@
+import { z } from "zod";
+import { FeastDatasourceSchema } from "./feastDatasources";
+import { FeastEntitySchema } from "./feastEntities";
+import { FeastFeatureServiceSchema } from "./feastFeatureServices";
+import { FeastFeatureViewSchema } from "./feastFeatureViews";
+import { FeastSavedDatasetSchema } from "./feastSavedDataset";
+import { FeastODFVSchema } from "./feastODFVS";
+
+const FeastRegistrySchema = z.object({
+ project: z.string(),
+ dataSources: z.array(FeastDatasourceSchema).optional(),
+ entities: z.array(FeastEntitySchema).optional(),
+ featureViews: z.array(FeastFeatureViewSchema).optional(),
+ onDemandFeatureViews: z.array(FeastODFVSchema).optional(),
+ featureServices: z.array(FeastFeatureServiceSchema).optional(),
+ savedDatasets: z.array(FeastSavedDatasetSchema).optional(),
+});
+
+type FeastRegistryType = z.infer;
+
+export { FeastRegistrySchema };
+export type { FeastRegistryType }; diff --git a/ui/src/parsers/feastSavedDataset.ts b/ui/src/parsers/feastSavedDataset.ts
new file mode 100644
index 0000000000..2c97acda74
--- /dev/null
+++ b/ui/src/parsers/feastSavedDataset.ts
@@ -0,0 +1,37 @@
+import { z } from "zod";
+
+const FeastSavedDatasetSchema = z.object({
+ spec: z.object({
+ name: z.string(),
+ features: z.array(z.string()),
+ joinKeys: z.array(z.string()),
+ storage: z.object({
+ fileStorage: z.object({
+ fileFormat: z.object({
+ parquetFormat: z.object({}).optional(),
+ }),
+ fileUrl: z.string(),
+ }),
+ }),
+ featureService: z
+ .object({
+ spec: z.object({
+ name: z.string(),
+ }),
+ })
+ .transform((obj) => {
+ return obj.spec.name;
+ }),
+ profile: z.string().optional(),
+ }),
+ meta: z.object({
+ createdTimestamp: z.string().transform((val) => new Date(val)),
+ minEventTimestamp: z.string().transform((val) => new Date(val)),
+ maxEventTimestamp: z.string().transform((val) => new Date(val)),
+ }),
+});
+
+type FeastSavedDatasetType = z.infer;
+
+export { FeastSavedDatasetSchema };
+export type { FeastSavedDatasetType }; diff --git a/ui/src/parsers/featureViewSummaryStatistics.ts b/ui/src/parsers/featureViewSummaryStatistics.ts
new file mode 100644
index 0000000000..f8eca669d3
--- /dev/null
+++ b/ui/src/parsers/featureViewSummaryStatistics.ts
@@ -0,0 +1,56 @@
+import { z } from "zod";
+
+const histogramSchema = z.array(
+ z.object({
+ x0: z.number(),
+ x1: z.number(),
+ count: z.number(),
+ })
+);
+
+const numericColumnSummaryStaticsSchema = z.object({
+ name: z.string(),
+ valueType: z.literal("INT64"),
+ sampleValues: z.array(z.number()),
+ histogram: histogramSchema.optional(),
+ proportionOfZeros: z.number().optional(),
+ proportionMissing: z.number().optional(),
+ min: z.number().optional(),
+ max: z.number().optional(),
+});
+
+const stringColumnSummaryStaticsSchema = z.object({
+ name: z.string(),
+ valueType: z.literal("STRING"),
+ sampleValues: z.array(z.string()),
+});
+
+const columnsSummaryStatisticsSchema = z.union([
+ numericColumnSummaryStaticsSchema,
+ stringColumnSummaryStaticsSchema,
+]);
+
+const featureViewSummaryStatisticsSchema = z.object({
+ columnsSummaryStatistics: 
z.record(columnsSummaryStatisticsSchema), +}); + +type FeatureViewSummaryStatisticsType = z.infer< + typeof featureViewSummaryStatisticsSchema +>; + +type NumericColumnSummaryStatisticType = z.infer< + typeof numericColumnSummaryStaticsSchema +>; +type StringColumnSummaryStatisticType = z.infer< + typeof stringColumnSummaryStaticsSchema +>; + +type HistogramDataType = z.infer; + +export { featureViewSummaryStatisticsSchema }; +export type { + FeatureViewSummaryStatisticsType, + HistogramDataType, + NumericColumnSummaryStatisticType, + StringColumnSummaryStatisticType, +}; diff --git a/ui/src/parsers/mergedFVTypes.ts b/ui/src/parsers/mergedFVTypes.ts new file mode 100644 index 0000000000..6a53b18e94 --- /dev/null +++ b/ui/src/parsers/mergedFVTypes.ts @@ -0,0 +1,63 @@ +import { + FeastFeatureColumnType, + FeastFeatureViewType, +} from "./feastFeatureViews"; +import { FeastODFVType } from "./feastODFVS"; +import { FeastRegistryType } from "./feastRegistry"; + +enum FEAST_FV_TYPES { + regular = "regular", + ondemand = "ondemand", +} + +interface regularFVInterface { + name: string; + type: FEAST_FV_TYPES.regular; + features: FeastFeatureColumnType[]; + object: FeastFeatureViewType; +} + +interface ODFVInterface { + name: string; + type: FEAST_FV_TYPES.ondemand; + features: FeastFeatureColumnType[]; + object: FeastODFVType; +} + +type genericFVType = regularFVInterface | ODFVInterface; + +const mergedFVTypes = (objects: FeastRegistryType) => { + const mergedFVMap: Record = {}; + + const mergedFVList: genericFVType[] = []; + + objects.featureViews?.forEach((fv) => { + const obj: genericFVType = { + name: fv.spec.name, + type: FEAST_FV_TYPES.regular, + features: fv.spec.features, + object: fv, + }; + + mergedFVMap[fv.spec.name] = obj; + mergedFVList.push(obj); + }); + + objects.onDemandFeatureViews?.forEach((odfv) => { + const obj: genericFVType = { + name: odfv.spec.name, + type: FEAST_FV_TYPES.ondemand, + features: odfv.spec.features, + object: odfv, + }; + + mergedFVMap[odfv.spec.name] = obj; + mergedFVList.push(obj); + }); + + return { mergedFVMap, mergedFVList }; +}; + +export default mergedFVTypes; +export { FEAST_FV_TYPES }; +export type { genericFVType, regularFVInterface, ODFVInterface }; diff --git a/ui/src/parsers/parseEntityRelationships.ts b/ui/src/parsers/parseEntityRelationships.ts new file mode 100644 index 0000000000..bf82e86ff9 --- /dev/null +++ b/ui/src/parsers/parseEntityRelationships.ts @@ -0,0 +1,95 @@ +import { FeastRegistryType } from "./feastRegistry"; +import { FEAST_FCO_TYPES } from "./types"; + +interface EntityReference { + type: FEAST_FCO_TYPES; + name: string; +} + +interface EntityRelation { + source: EntityReference; + target: EntityReference; +} + +const parseEntityRelationships = (objects: FeastRegistryType) => { + const links: EntityRelation[] = []; + + objects.featureServices?.forEach((fs) => { + fs.spec.features.forEach((feature) => { + links.push({ + source: { + type: FEAST_FCO_TYPES["featureView"], + name: feature.featureViewName, + }, + target: { + type: FEAST_FCO_TYPES["featureService"], + name: fs.spec.name, + }, + }); + }); + }); + + objects.featureViews?.forEach((fv) => { + fv.spec.entities.forEach((ent) => { + links.push({ + source: { + type: FEAST_FCO_TYPES["entity"], + name: ent, + }, + target: { + type: FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + }, + }); + }); + if (fv.spec.batchSource) { + links.push({ + source: { + type: FEAST_FCO_TYPES["dataSource"], + name: fv.spec.batchSource.name || '' + }, + target: { + type: 
FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + } + }) + } + }); + + objects.onDemandFeatureViews?.forEach((fv) => { + Object.values(fv.spec.inputs).forEach((input: { [key: string]: any }) => { + if (input.requestDataSource) { + links.push({ + source: { + type: FEAST_FCO_TYPES["dataSource"], + name: input.requestDataSource.name, + }, + target: { + type: FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + }, + }); + } else if (input.featureViewProjection?.featureViewName) { + const source_fv = objects.featureViews?.find(el => el.spec.name === input.featureViewProjection.featureViewName); + if (!source_fv) { + return; + } + links.push({ + source: { + type: FEAST_FCO_TYPES["dataSource"], + name: source_fv?.spec.batchSource.name || '', + }, + target: { + type: FEAST_FCO_TYPES["featureView"], + name: fv.spec.name, + }, + }); + } + }); + }); + + return links; +}; + +export default parseEntityRelationships; +export type { EntityRelation, EntityReference }; diff --git a/ui/src/parsers/parseIndirectRelationships.ts b/ui/src/parsers/parseIndirectRelationships.ts new file mode 100644 index 0000000000..d7d532ad3e --- /dev/null +++ b/ui/src/parsers/parseIndirectRelationships.ts @@ -0,0 +1,33 @@ +import { FeastRegistryType } from "./feastRegistry"; +import { EntityRelation } from "./parseEntityRelationships"; +import { FEAST_FCO_TYPES } from "./types"; + +const parseIndirectRelationships = ( + relationships: EntityRelation[], + objects: FeastRegistryType +) => { + const indirectLinks: EntityRelation[] = []; + + // Only contains Entity -> FS or DS -> FS relationships + objects.featureServices?.forEach((featureService) => { + featureService.spec.features.forEach((featureView) => { + relationships + .filter( + (relationship) => + relationship.target.name === featureView.featureViewName + ) + .forEach((relationship) => { + indirectLinks.push({ + source: relationship.source, + target: { + type: FEAST_FCO_TYPES["featureService"], + name: featureService.spec.name, + }, + }); + }); + }); + }); + return indirectLinks; +}; + +export default parseIndirectRelationships; diff --git a/ui/src/parsers/types.ts b/ui/src/parsers/types.ts new file mode 100644 index 0000000000..2f88eea4f0 --- /dev/null +++ b/ui/src/parsers/types.ts @@ -0,0 +1,29 @@ +enum FEAST_FCO_TYPES { + dataSource = "dataSource", + entity = "entity", + featureView = "featureView", + featureService = "featureService", +} + +enum FEAST_FEATURE_VALUE_TYPES { + FLOAT = "FLOAT", + INT64 = "INT64", + STRING = "STRING", + BOOL = "BOOL", + BYTES = "BYTES", + INT32 = "INT32", + DOUBLE = "DOUBLE", + UNIX_TIMESTAMP = "UNIX_TIMESTAMP", + INVALID = "INVALID", + BYTES_LIST = "BYTES_LIST", + STRING_LIST = "STRING_LIST", + INT32_LIST = "INT32_LIST", + INT64_LIST = "INT64_LIST", + DOUBLE_LIST = "DOUBLE_LIST", + FLOAT_LIST = "FLOAT_LIST", + BOOL_LIST = "BOOL_LIST", + UNIX_TIMESTAMP_LIST = "UNIX_TIMESTAMP_LIST", + NULL = "NULL" +} + +export { FEAST_FCO_TYPES, FEAST_FEATURE_VALUE_TYPES }; diff --git a/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts b/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts new file mode 100644 index 0000000000..0604029866 --- /dev/null +++ b/ui/src/queries/useLoadFeatureViewSummaryStatistics.ts @@ -0,0 +1,37 @@ +import { useQuery } from "react-query"; +import { useParams } from "react-router-dom"; +import { + featureViewSummaryStatisticsSchema, + FeatureViewSummaryStatisticsType, +} from "../parsers/featureViewSummaryStatistics"; + +const useLoadFeatureViewSummaryStatistics = (featureViewName: string) => { + const { 
projectName } = useParams(); + + const queryKey = `featureViewSummaryStatistics:${featureViewName}`; + const url = `/metadata/${projectName}/featureView/${featureViewName}.json`; + + return useQuery( + queryKey, + () => { + return fetch(url, { + headers: { + "Content-Type": "application/json", + }, + }) + .then((res) => { + return res.json(); + }) + .then((json) => { + const summary = featureViewSummaryStatisticsSchema.parse(json); + + return summary; + }); + }, + { + staleTime: 15 * 60 * 1000, // Given that we are reading from a registry dump, this seems reasonable for now. + } + ); +}; + +export default useLoadFeatureViewSummaryStatistics; diff --git a/ui/src/queries/useLoadRegistry.ts b/ui/src/queries/useLoadRegistry.ts new file mode 100644 index 0000000000..ffb0675643 --- /dev/null +++ b/ui/src/queries/useLoadRegistry.ts @@ -0,0 +1,72 @@ +import { useQuery } from "react-query"; +import { + FeastRegistrySchema, + FeastRegistryType, +} from "../parsers/feastRegistry"; +import mergedFVTypes, { genericFVType } from "../parsers/mergedFVTypes"; +import parseEntityRelationships, { + EntityRelation, +} from "../parsers/parseEntityRelationships"; +import parseIndirectRelationships from "../parsers/parseIndirectRelationships"; + +interface FeatureStoreAllData { + project: string; + description?: string; + objects: FeastRegistryType; + relationships: EntityRelation[]; + mergedFVMap: Record; + mergedFVList: genericFVType[]; + indirectRelationships: EntityRelation[]; +} + +const useLoadRegistry = (url: string) => { + return useQuery( + `registry:${url}`, + () => { + return fetch(url, { + headers: { + "Content-Type": "application/json", + }, + }) + .then((res) => { + return res.json(); + }) + .then((json) => { + const objects = FeastRegistrySchema.parse(json); + + const { mergedFVMap, mergedFVList } = mergedFVTypes(objects); + + const relationships = parseEntityRelationships(objects); + + // Only contains Entity -> FS or DS -> FS relationships + const indirectRelationships = parseIndirectRelationships( + relationships, + objects + ); + + // console.log({ + // objects, + // mergedFVMap, + // mergedFVList, + // relationships, + // indirectRelationships, + // }); + + return { + project: objects.project, + objects, + mergedFVMap, + mergedFVList, + relationships, + indirectRelationships, + }; + }); + }, + { + staleTime: Infinity, // Given that we are reading from a registry dump, this seems reasonable for now. + } + ); +}; + +export default useLoadRegistry; +export type { FeatureStoreAllData }; diff --git a/ui/src/queries/useLoadRelationshipsData.ts b/ui/src/queries/useLoadRelationshipsData.ts new file mode 100644 index 0000000000..6f65af7e76 --- /dev/null +++ b/ui/src/queries/useLoadRelationshipsData.ts @@ -0,0 +1,20 @@ +import { useContext } from "react"; +import RegistryPathContext from "../contexts/RegistryPathContext"; +import useLoadRegistry from "./useLoadRegistry"; + +const useLoadRelationshipData = () => { + const registryUrl = useContext(RegistryPathContext); + const registryQuery = useLoadRegistry(registryUrl); + + const data = + registryQuery.data === undefined + ? 
undefined + : registryQuery.data.relationships; + + return { + ...registryQuery, + data, + }; +}; + +export default useLoadRelationshipData; diff --git a/ui/src/react-app-env.d.ts b/ui/src/react-app-env.d.ts new file mode 100644 index 0000000000..6431bc5fc6 --- /dev/null +++ b/ui/src/react-app-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/ui/src/setupTests.ts b/ui/src/setupTests.ts new file mode 100644 index 0000000000..8f2609b7b3 --- /dev/null +++ b/ui/src/setupTests.ts @@ -0,0 +1,5 @@ +// jest-dom adds custom jest matchers for asserting on DOM nodes. +// allows you to do things like: +// expect(element).toHaveTextContent(/react/i) +// learn more: https://github.com/testing-library/jest-dom +import '@testing-library/jest-dom'; diff --git a/ui/src/test-utils.tsx b/ui/src/test-utils.tsx new file mode 100644 index 0000000000..c62770b928 --- /dev/null +++ b/ui/src/test-utils.tsx @@ -0,0 +1,37 @@ +import React from "react"; +import { render } from "@testing-library/react"; +import { QueryClient, QueryClientProvider } from "react-query"; +import { QueryParamProvider } from "use-query-params"; +import { MemoryRouter as Router } from "react-router-dom"; +import RouteAdapter from "./hacks/RouteAdapter"; + +interface ProvidersProps { + children: React.ReactElement; +} + +const queryClient = new QueryClient(); + +const AllTheProviders = ({ children }: ProvidersProps) => { + return ( + + + + {children} + + + + ); +}; + +const customRender = ( + ui: React.ReactElement, + options?: Record +) => render(ui, { wrapper: AllTheProviders, ...options }); + +// re-export everything +export * from "@testing-library/react"; + +// override render method +export { customRender as render }; diff --git a/ui/src/utils/custom-tabs/DataSourceCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/DataSourceCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..1ffc20f423 --- /dev/null +++ b/ui/src/utils/custom-tabs/DataSourceCustomTabLoadingWrapper.tsx @@ -0,0 +1,27 @@ +import React from "react"; +import { useParams } from "react-router-dom"; + +import { DataSourceCustomTabProps } from "../../custom-tabs/types"; +import useLoadDataSource from "../../pages/data-sources/useLoadDataSource"; + +interface DataSourceCustomTabLoadingWrapperProps { + Component: (props: DataSourceCustomTabProps) => JSX.Element; +} + +const DataSourceCustomTabLoadingWrapper = ({ + Component, +}: DataSourceCustomTabLoadingWrapperProps) => { + let { dataSourceName } = useParams(); + + if (!dataSourceName) { + throw new Error( + `This route has no 'dataSourceName' part. 
This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadDataSource(dataSourceName); + + return ; +}; + +export default DataSourceCustomTabLoadingWrapper; diff --git a/ui/src/utils/custom-tabs/DatasetCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/DatasetCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..789b0563cc --- /dev/null +++ b/ui/src/utils/custom-tabs/DatasetCustomTabLoadingWrapper.tsx @@ -0,0 +1,27 @@ +import React from "react"; +import { useParams } from "react-router-dom"; + +import { DatasetCustomTabProps } from "../../custom-tabs/types"; +import useLoadDataset from "../../pages/saved-data-sets/useLoadDataset"; + +interface DatasetCustomTabLoadingWrapperProps { + Component: (props: DatasetCustomTabProps) => JSX.Element; +} + +const DatasetCustomTabLoadingWrapper = ({ + Component, +}: DatasetCustomTabLoadingWrapperProps) => { + let { datasetName } = useParams(); + + if (!datasetName) { + throw new Error( + "Route doesn't have a 'datasetName' part. This route is likely rendering the wrong component." + ); + } + + const feastObjectQuery = useLoadDataset(datasetName); + + return ; +}; + +export default DatasetCustomTabLoadingWrapper; diff --git a/ui/src/utils/custom-tabs/EntityCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/EntityCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..0aa95cafb5 --- /dev/null +++ b/ui/src/utils/custom-tabs/EntityCustomTabLoadingWrapper.tsx @@ -0,0 +1,27 @@ +import React from "react"; +import { useParams } from "react-router-dom"; + +import { EntityCustomTabProps } from "../../custom-tabs/types"; +import useLoadEntity from "../../pages/entities/useLoadEntity"; + +interface EntityCustomTabLoadingWrapperProps { + Component: (props: EntityCustomTabProps) => JSX.Element; +} + +const EntityCustomTabLoadingWrapper = ({ + Component, +}: EntityCustomTabLoadingWrapperProps) => { + let { entityName } = useParams(); + + if (!entityName) { + throw new Error( + `This route has no 'entityName' part. This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadEntity(entityName); + + return ; +}; + +export default EntityCustomTabLoadingWrapper; diff --git a/ui/src/utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..816c36988f --- /dev/null +++ b/ui/src/utils/custom-tabs/FeatureServiceCustomTabLoadingWrapper.tsx @@ -0,0 +1,29 @@ +import React from "react"; +import { useParams } from "react-router-dom"; + +import { FeatureServiceCustomTabProps } from "../../custom-tabs/types"; +import useLoadFeatureService from "../../pages/feature-services/useLoadFeatureService"; + +interface FeatureServiceCustomTabLoadingWrapperProps { + Component: (props: FeatureServiceCustomTabProps) => JSX.Element; +} + +const FeatureServiceCustomTabLoadingWrapper = ({ + Component, +}: FeatureServiceCustomTabLoadingWrapperProps) => { + const { featureServiceName } = useParams(); + + if (!featureServiceName) { + throw new Error( + `This route has no 'featureServiceName' part. 
This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadFeatureService(featureServiceName); + + return ( + + ); +}; + +export default FeatureServiceCustomTabLoadingWrapper; diff --git a/ui/src/utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..bd005b7a0a --- /dev/null +++ b/ui/src/utils/custom-tabs/OnDemandFeatureViewCustomTabLoadingWrapper.tsx @@ -0,0 +1,46 @@ +import React from "react"; + +import { useParams } from "react-router-dom"; +import useLoadFeatureView from "../../pages/feature-views/useLoadFeatureView"; +import { + OnDemandFeatureViewCustomTabProps, + OnDemandFeatureViewQueryReturnType, +} from "../../custom-tabs/types"; +import { FEAST_FV_TYPES } from "../../parsers/mergedFVTypes"; + +interface OnDemandFeatureViewCustomTabLoadingWrapperProps { + Component: (props: OnDemandFeatureViewCustomTabProps) => JSX.Element; +} + +const OnDemandFeatureViewCustomTabLoadingWrapper = ({ + Component, +}: OnDemandFeatureViewCustomTabLoadingWrapperProps) => { + const { featureViewName } = useParams(); + + if (!featureViewName) { + throw new Error( + `This route has no 'featureViewName' part. This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadFeatureView(featureViewName); + + if ( + feastObjectQuery.isSuccess && + feastObjectQuery.data && + feastObjectQuery.data.type !== FEAST_FV_TYPES.ondemand + ) { + throw new Error( + `This should not happen. Somehow a custom tab on a ODFV page received data that does not have the shape?` + ); + } + + return ( + + ); +}; + +export default OnDemandFeatureViewCustomTabLoadingWrapper; diff --git a/ui/src/utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper.tsx b/ui/src/utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper.tsx new file mode 100644 index 0000000000..1a8288216a --- /dev/null +++ b/ui/src/utils/custom-tabs/RegularFeatureViewCustomTabLoadingWrapper.tsx @@ -0,0 +1,46 @@ +import React from "react"; + +import { useParams } from "react-router-dom"; +import useLoadFeatureView from "../../pages/feature-views/useLoadFeatureView"; +import { + RegularFeatureViewCustomTabProps, + RegularFeatureViewQueryReturnType, +} from "../../custom-tabs/types"; +import { FEAST_FV_TYPES } from "../../parsers/mergedFVTypes"; + +interface RegularFeatureViewCustomTabLoadingWrapperProps { + Component: (props: RegularFeatureViewCustomTabProps) => JSX.Element; +} + +const RegularFeatureViewCustomTabLoadingWrapper = ({ + Component, +}: RegularFeatureViewCustomTabLoadingWrapperProps) => { + const { featureViewName } = useParams(); + + if (!featureViewName) { + throw new Error( + `This route has no 'featureViewName' part. This route is likely not supposed to render this component.` + ); + } + + const feastObjectQuery = useLoadFeatureView(featureViewName); + + if ( + feastObjectQuery.isSuccess && + feastObjectQuery.data && + feastObjectQuery.data.type !== FEAST_FV_TYPES.regular + ) { + throw new Error( + `This should not happen. 
Somehow a custom tab on a Regular FV page received data that does not have the shape?` + ); + } + + return ( + + ); +}; + +export default RegularFeatureViewCustomTabLoadingWrapper; diff --git a/ui/tsconfig.json b/ui/tsconfig.json new file mode 100644 index 0000000000..a273b0cfc0 --- /dev/null +++ b/ui/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "target": "es5", + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "noFallthroughCasesInSwitch": true, + "module": "esnext", + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx" + }, + "include": [ + "src" + ] +} diff --git a/ui/yarn.lock b/ui/yarn.lock new file mode 100644 index 0000000000..396204ffb4 --- /dev/null +++ b/ui/yarn.lock @@ -0,0 +1,10993 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.1.2.tgz#4edca94973ded9630d20101cd8559cedb8d8bd34" + integrity sha512-hoyByceqwKirw7w3Z7gnIIZC3Wx3J484Y3L/cMpXFbr7d9ZQj2mODrirNzcJa+SM3UlpWXYvKV4RlRpFXlWgXg== + dependencies: + "@jridgewell/trace-mapping" "^0.3.0" + +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.2.tgz#cd6d3814eda8aee38ee2e3fa6457be43af4f8361" + integrity sha512-JdEazx7qiVqTBzzBl5rolRwl5cmhihjfIcpqRzIZjtT6b18liVmDn/VlWpqW4C/qP2hrFFMLRV1wlex8ZVBPTg== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.8.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" + integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== + dependencies: + "@babel/highlight" "^7.16.7" + +"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.16.4", "@babel/compat-data@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.16.8.tgz#31560f9f29fdf1868de8cb55049538a1b9732a60" + integrity sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": + version "7.16.12" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.16.12.tgz#5edc53c1b71e54881315923ae2aedea2522bb784" + integrity sha512-dK5PtG1uiN2ikk++5OzSYsitZKny4wOCD0nrO4TqnW4BVBTQ2NGS3NgilvT/TEyxTST7LNyWV/T4tXDoD3fOgg== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.16.8" + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helpers" "^7.16.7" + "@babel/parser" "^7.16.12" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.16.10" + "@babel/types" "^7.16.8" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.1.2" + semver "^6.3.0" + source-map "^0.5.0" + +"@babel/core@^7.17.5": + version "7.17.5" + resolved 
"https://registry.yarnpkg.com/@babel/core/-/core-7.17.5.tgz#6cd2e836058c28f06a4ca8ee7ed955bbf37c8225" + integrity sha512-/BBMw4EvjmyquN5O+t5eh0+YqB3XXJkYD2cjKpYtWOfFy4lQ4UozNSmxAcWT8r2XtZs0ewG+zrfsqeR15i1ajA== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.3" + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helpers" "^7.17.2" + "@babel/parser" "^7.17.3" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.3" + "@babel/types" "^7.17.0" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.1.2" + semver "^6.3.0" + +"@babel/eslint-parser@^7.16.3": + version "7.16.5" + resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.16.5.tgz#48d3485091d6e36915358e4c0d0b2ebe6da90462" + integrity sha512-mUqYa46lgWqHKQ33Q6LNCGp/wPR3eqOYTUixHFsfrSQqRxH0+WOzca75iEjFr5RDGH1dDz622LaHhLOzOuQRUA== + dependencies: + eslint-scope "^5.1.1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" + +"@babel/generator@^7.16.8", "@babel/generator@^7.7.2": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.16.8.tgz#359d44d966b8cd059d543250ce79596f792f2ebe" + integrity sha512-1ojZwE9+lOXzcWdWmO6TbUzDfqLD39CmEhN8+2cX9XkDo5yW1OpgfejfliysR2AWLpMamTiOiAp/mtroaymhpw== + dependencies: + "@babel/types" "^7.16.8" + jsesc "^2.5.1" + source-map "^0.5.0" + +"@babel/generator@^7.17.3": + version "7.17.3" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.17.3.tgz#a2c30b0c4f89858cb87050c3ffdfd36bdf443200" + integrity sha512-+R6Dctil/MgUsZsZAkYgK+ADNSZzJRRy0TvY65T71z/CR854xHQ1EweBYXdfT+HNeN7w0cSJJEzgxZMv40pxsg== + dependencies: + "@babel/types" "^7.17.0" + jsesc "^2.5.1" + source-map "^0.5.0" + +"@babel/helper-annotate-as-pure@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862" + integrity sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz#38d138561ea207f0f69eb1626a418e4f7e6a580b" + integrity sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz#06e66c5f299601e6c7da350049315e83209d551b" + integrity sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA== + dependencies: + "@babel/compat-data" "^7.16.4" + "@babel/helper-validator-option" "^7.16.7" + browserslist "^4.17.5" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.16.10", "@babel/helper-create-class-features-plugin@^7.16.7": + version "7.16.10" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.16.10.tgz#8a6959b9cc818a88815ba3c5474619e9c0f2c21c" + integrity 
sha512-wDeej0pu3WN/ffTxMNCPW5UCiOav8IcLRxSIyp/9+IF2xJUM9h/OYjg0IJLHaL6F8oU8kqMz9nc1vryXhMsgXg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-member-expression-to-functions" "^7.16.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + +"@babel/helper-create-regexp-features-plugin@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.16.7.tgz#0cb82b9bac358eb73bfbd73985a776bfa6b14d48" + integrity sha512-fk5A6ymfp+O5+p2yCkXAu5Kyj6v0xh0RBeNcAkYUMDvvAAoxvSKXn+Jb37t/yWFiQVDFK1ELpUTD8/aLhCPu+g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + regexpu-core "^4.7.1" + +"@babel/helper-define-polyfill-provider@^0.3.1": + version "0.3.1" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz#52411b445bdb2e676869e5a74960d2d3826d2665" + integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA== + dependencies: + "@babel/helper-compilation-targets" "^7.13.0" + "@babel/helper-module-imports" "^7.12.13" + "@babel/helper-plugin-utils" "^7.13.0" + "@babel/traverse" "^7.13.0" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz#ff484094a839bde9d89cd63cba017d7aae80ecd7" + integrity sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-explode-assignable-expression@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a" + integrity sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-function-name@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz#f1ec51551fb1c8956bc8dd95f38523b6cf375f8f" + integrity sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA== + dependencies: + "@babel/helper-get-function-arity" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-get-function-arity@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz#ea08ac753117a669f1508ba06ebcc49156387419" + integrity sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-hoist-variables@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" + integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-member-expression-to-functions@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.7.tgz#42b9ca4b2b200123c3b7e726b0ae5153924905b0" + integrity sha512-VtJ/65tYiU/6AbMTDwyoXGPKHgTsfRarivm+YbB5uAzKUyuPjgZSgAFeG87FCigc7KNHu2Pegh1XIT3lXjvz3Q== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" + integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-transforms@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz#7665faeb721a01ca5327ddc6bba15a5cb34b6a41" + integrity sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng== + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-optimise-call-expression@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz#a34e3560605abbd31a18546bd2aad3e6d9a174f2" + integrity sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz#aa3a8ab4c3cceff8e65eb9e73d87dc4ff320b2f5" + integrity sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA== + +"@babel/helper-remap-async-to-generator@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz#29ffaade68a367e2ed09c90901986918d25e57e3" + integrity sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-wrap-function" "^7.16.8" + "@babel/types" "^7.16.8" + +"@babel/helper-replace-supers@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz#e9f5f5f32ac90429c1a4bdec0f231ef0c2838ab1" + integrity sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw== + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-member-expression-to-functions" "^7.16.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/traverse" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-simple-access@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz#d656654b9ea08dbb9659b69d61063ccd343ff0f7" + integrity sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-skip-transparent-expression-wrappers@^7.16.0": + version "7.16.0" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09" + integrity sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw== + dependencies: + "@babel/types" "^7.16.0" + +"@babel/helper-split-export-declaration@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" + integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-validator-identifier@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" + integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== + +"@babel/helper-validator-option@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" + integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ== + +"@babel/helper-wrap-function@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz#58afda087c4cd235de92f7ceedebca2c41274200" + integrity sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw== + dependencies: + "@babel/helper-function-name" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + +"@babel/helpers@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.16.7.tgz#7e3504d708d50344112767c3542fc5e357fffefc" + integrity sha512-9ZDoqtfY7AuEOt3cxchfii6C7GDyyMBffktR5B2jvWv8u2+efwvpnVKXMWzNehqy68tKgAfSwfdw/lWpthS2bw== + dependencies: + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helpers@^7.17.2": + version "7.17.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.17.2.tgz#23f0a0746c8e287773ccd27c14be428891f63417" + integrity sha512-0Qu7RLR1dILozr/6M0xgj+DFPmi6Bnulgm9M8BVa9ZCWxDqlSnqt3cf8IDPB5m45sVXUZ0kuQAgUrdSFFH79fQ== + dependencies: + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.0" + "@babel/types" "^7.17.0" + +"@babel/highlight@^7.16.7": + version "7.16.10" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.16.10.tgz#744f2eb81579d6eea753c227b0f570ad785aba88" + integrity sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.10", "@babel/parser@^7.16.12", "@babel/parser@^7.16.7": + version "7.16.12" + resolved 
"https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.12.tgz#9474794f9a650cf5e2f892444227f98e28cdf8b6" + integrity sha512-VfaV15po8RiZssrkPweyvbGVSe4x2y+aciFCgn0n0/SJMR22cwofRV1mtnJQYcSB1wUTaA/X1LnA3es66MCO5A== + +"@babel/parser@^7.17.3": + version "7.17.3" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.3.tgz#b07702b982990bf6fdc1da5049a23fece4c5c3d0" + integrity sha512-7yJPvPV+ESz2IUTPbOL+YkIGyCqOyNIzdguKQuJGnH7bg1WTIifuM21YqokFt/THWh1AkCRn9IgoykTRCBVpzA== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz#4eda6d6c2a0aa79c70fa7b6da67763dfe2141050" + integrity sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz#cc001234dfc139ac45f6bcf801866198c8c72ff9" + integrity sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.7" + +"@babel/plugin-proposal-async-generator-functions@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz#3bdd1ebbe620804ea9416706cd67d60787504bc8" + integrity sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-remap-async-to-generator" "^7.16.8" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz#925cad7b3b1a2fcea7e59ecc8eb5954f961f91b0" + integrity sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-proposal-class-static-block@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.16.7.tgz#712357570b612106ef5426d13dc433ce0f200c2a" + integrity sha512-dgqJJrcZoG/4CkMopzhPJjGxsIe9A8RlkQLnL/Vhhx8AA9ZuaRwGSlscSh42hazc7WSrya/IK7mTeoF0DP9tEw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.16.7.tgz#922907d2e3e327f5b07d2246bcfc0bd438f360d2" + integrity sha512-DoEpnuXK14XV9btI1k8tzNGCutMclpj4yru8aXKoHlVmbO1s+2A+g2+h4JhcjrxkFJqzbymnLG6j/niOf3iFXQ== + dependencies: + 
"@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-decorators" "^7.16.7" + +"@babel/plugin-proposal-dynamic-import@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz#c19c897eaa46b27634a00fee9fb7d829158704b2" + integrity sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz#09de09df18445a5786a305681423ae63507a6163" + integrity sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz#9732cb1d17d9a2626a08c5be25186c195b6fa6e8" + integrity sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz#be23c0ba74deec1922e639832904be0bea73cdea" + integrity sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz#141fc20b6857e59459d430c850a0011e36561d99" + integrity sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz#d6b69f4af63fb38b6ca2558442a7fb191236eba9" + integrity sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.16.7.tgz#94593ef1ddf37021a25bdcb5754c4a8d534b01d8" + integrity sha512-3O0Y4+dw94HA86qSg9IHfyPktgR7q3gpNVAeiKQd+8jBKFaU5NQS1Yatgo4wY+UFNuLjvxcSmzcsHqrhgTyBUA== + dependencies: + "@babel/compat-data" "^7.16.4" + "@babel/helper-compilation-targets" "^7.16.7" + 
"@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.16.7" + +"@babel/plugin-proposal-optional-catch-binding@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz#c623a430674ffc4ab732fd0a0ae7722b67cb74cf" + integrity sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz#7cd629564724816c0e8a969535551f943c64c39a" + integrity sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.16.11": + version "7.16.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz#e8df108288555ff259f4527dbe84813aac3a1c50" + integrity sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.10" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-proposal-private-property-in-object@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz#b0b8cef543c2c3d57e59e2c611994861d46a3fce" + integrity sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.16.7", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz#635d18eb10c6214210ffc5ff4932552de08188a2" + integrity sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity 
sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-decorators@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.16.7.tgz#f66a0199f16de7c1ef5192160ccf5d069739e3d3" + integrity sha512-vQ+PxL+srA7g6Rx6I1e15m55gftknl2X8GCUW1JTlkTaXZLJOS0UcaY0eK9jYT7IYf4awn6qwyghVHLDz1WyMw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.16.7.tgz#202b147e5892b8452bbb0bb269c7ed2539ab8832" + integrity sha512-UDo3YGQO0jH6ytzVwgSLv9i/CzMcUjbKenL67dTrAZPPv6GFAtDhe6jqnvmoKzC/7htNTohhos+onPtDMqJwaQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.16.7.tgz#50b6571d13f764266a113d77c82b4a6508bbe665" + integrity 
sha512-Esxmk7YjA8QysKeT3VhTXvF6y77f/a91SIs4pWb4H2eWGQkCKFgQaG6hdoEVZtGsrAcb2K5BW66XsOErD4WU3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity 
sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.16.7", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.16.7.tgz#39c9b55ee153151990fb038651d58d3fd03f98f8" + integrity sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-arrow-functions@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz#44125e653d94b98db76369de9c396dc14bef4154" + integrity sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-async-to-generator@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz#b83dff4b970cf41f1b819f8b49cc0cfbaa53a808" + integrity sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg== + dependencies: + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-remap-async-to-generator" "^7.16.8" + +"@babel/plugin-transform-block-scoped-functions@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz#4d0d57d9632ef6062cdf354bb717102ee042a620" + integrity sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-block-scoping@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz#f50664ab99ddeaee5bc681b8f3a6ea9d72ab4f87" + integrity sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-classes@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz#8f4b9562850cd973de3b498f1218796eb181ce00" + integrity sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz#66dee12e46f61d2aae7a73710f591eb3df616470" + integrity sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-destructuring@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.16.7.tgz#ca9588ae2d63978a4c29d3f33282d8603f618e23" + integrity sha512-VqAwhTHBnu5xBVDCvrvqJbtLUa++qZaWC0Fgr2mqokBlulZARGyIvZDoqbPlPaKImQ9dKAcCzbv+ul//uqu70A== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-dotall-regex@^7.16.7", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz#6b2d67686fab15fb6a7fd4bd895d5982cfc81241" + integrity sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-duplicate-keys@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz#2207e9ca8f82a0d36a5a67b6536e7ef8b08823c9" + integrity sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-exponentiation-operator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz#efa9862ef97e9e9e5f653f6ddc7b665e8536fe9b" + integrity sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.16.7.tgz#291fb140c78dabbf87f2427e7c7c332b126964b8" + integrity sha512-mzmCq3cNsDpZZu9FADYYyfZJIOrSONmHcop2XEKPdBNMa4PDC4eEvcOvzZaCNcjKu72v0XQlA5y1g58aLRXdYg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-flow" "^7.16.7" + +"@babel/plugin-transform-for-of@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz#649d639d4617dff502a9a158c479b3b556728d8c" + integrity sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-function-name@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz#5ab34375c64d61d083d7d2f05c38d90b97ec65cf" + integrity sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA== + dependencies: + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz#254c9618c5ff749e87cb0c0cef1a0a050c0bdab1" + integrity sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-member-expression-literals@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz#6e5dcf906ef8a098e630149d14c867dd28f92384" + integrity sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-modules-amd@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz#b28d323016a7daaae8609781d1f8c9da42b13186" + integrity sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g== + dependencies: + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.16.8.tgz#cdee19aae887b16b9d331009aa9a219af7c86afe" + integrity sha512-oflKPvsLT2+uKQopesJt3ApiaIS2HW+hzHFcwRNtyDGieAeC/dIHZX8buJQ2J2X1rxGPy4eRcUijm3qcSPjYcA== + dependencies: + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-simple-access" "^7.16.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.16.7.tgz#887cefaef88e684d29558c2b13ee0563e287c2d7" + integrity sha512-DuK5E3k+QQmnOqBR9UkusByy5WZWGRxfzV529s9nPra1GE7olmxfqO2FHobEOYSPIjPBTr4p66YDcjQnt8cBmw== + dependencies: + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz#23dad479fa585283dbd22215bff12719171e7618" + integrity sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ== + dependencies: + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz#7f860e0e40d844a02c9dcf9d84965e7dfd666252" + integrity sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + +"@babel/plugin-transform-new-target@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz#9967d89a5c243818e0800fdad89db22c5f514244" + integrity sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-object-super@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz#ac359cf8d32cf4354d27a46867999490b6c32a94" + integrity 
sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + +"@babel/plugin-transform-parameters@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz#a1721f55b99b736511cb7e0152f61f17688f331f" + integrity sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-property-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz#2dadac85155436f22c696c4827730e0fe1057a55" + integrity sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-constant-elements@^7.12.1": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.16.7.tgz#19e9e4c2df2f6c3e6b3aea11778297d81db8df62" + integrity sha512-lF+cfsyTgwWkcw715J88JhMYJ5GpysYNLhLP1PkvkhTRN7B3e74R/1KsDxFxhRpSn0UUD3IWM4GvdBR2PEbbQQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.16.7.tgz#7b6d40d232f4c0f550ea348593db3b21e2404340" + integrity sha512-qgIg8BcZgd0G/Cz916D5+9kqX0c7nPZyXaP8R2tLNN5tkyIZdG5fEwBrxwplzSnjC1jvQmyMNVwUCZPcbGY7Pg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-jsx-development@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.16.7.tgz#43a00724a3ed2557ed3f276a01a929e6686ac7b8" + integrity sha512-RMvQWvpla+xy6MlBpPlrKZCMRs2AGiHOGHY3xRwl0pEeim348dDyxeH4xBsMPbIMhujeq7ihE702eM2Ew0Wo+A== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.16.7" + +"@babel/plugin-transform-react-jsx@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.16.7.tgz#86a6a220552afd0e4e1f0388a68a372be7add0d4" + integrity sha512-8D16ye66fxiE8m890w0BpPpngG9o9OVBBy0gH2E+2AR7qMR2ZpTYJEqLxAsoroenMId0p/wMW+Blc0meDgu0Ag== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-jsx" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/plugin-transform-react-pure-annotations@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.16.7.tgz#232bfd2f12eb551d6d7d01d13fe3f86b45eb9c67" + integrity sha512-hs71ToC97k3QWxswh2ElzMFABXHvGiJ01IB1TbYQDGeWRKWz/MPUTh5jGExdHvosYKpnJW5Pm3S4+TA3FyX+GA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-regenerator@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.16.7.tgz#9e7576dc476cb89ccc5096fff7af659243b4adeb" + integrity sha512-mF7jOgGYCkSJagJ6XCujSQg+6xC1M77/03K2oBmVJWoFGNUtnVJO4WHKJk3dnPC8HCcj4xBQP1Egm8DWh3Pb3Q== + dependencies: + regenerator-transform "^0.14.2" + +"@babel/plugin-transform-reserved-words@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz#1d798e078f7c5958eec952059c460b220a63f586" + integrity sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-runtime@^7.16.4": + version "7.16.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.16.10.tgz#53d9fd3496daedce1dd99639097fa5d14f4c7c2c" + integrity sha512-9nwTiqETv2G7xI4RvXHNfpGdr8pAA+Q/YtN3yLK7OoK7n9OibVm/xymJ838a9A6E/IciOLPj82lZk0fW6O4O7w== + dependencies: + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + babel-plugin-polyfill-corejs2 "^0.3.0" + babel-plugin-polyfill-corejs3 "^0.5.0" + babel-plugin-polyfill-regenerator "^0.3.0" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz#e8549ae4afcf8382f711794c0c7b6b934c5fbd2a" + integrity sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-spread@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz#a303e2122f9f12e0105daeedd0f30fb197d8ff44" + integrity sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + +"@babel/plugin-transform-sticky-regex@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz#c84741d4f4a38072b9a1e2e3fd56d359552e8660" + integrity sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-template-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz#f3d1c45d28967c8e80f53666fc9c3e50618217ab" + integrity sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-typeof-symbol@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz#9cdbe622582c21368bd482b660ba87d5545d4f7e" + integrity sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-typescript@^7.16.7": + version "7.16.8" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.16.8.tgz#591ce9b6b83504903fa9dd3652c357c2ba7a1ee0" + integrity sha512-bHdQ9k7YpBDO2d0NVfkj51DpQcvwIzIusJ7mEUaMlbZq3Kt/U47j24inXZHQ5MDiYpCs+oZiwnXyKedE8+q7AQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-typescript" "^7.16.7" + +"@babel/plugin-transform-unicode-escapes@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz#da8717de7b3287a2c6d659750c964f302b31ece3" + integrity sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-unicode-regex@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz#0f7aa4a501198976e25e82702574c34cfebe9ef2" + integrity sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.11", "@babel/preset-env@^7.16.4": + version "7.16.11" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.16.11.tgz#5dd88fd885fae36f88fd7c8342475c9f0abe2982" + integrity sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g== + dependencies: + "@babel/compat-data" "^7.16.8" + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.16.7" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.16.7" + "@babel/plugin-proposal-async-generator-functions" "^7.16.8" + "@babel/plugin-proposal-class-properties" "^7.16.7" + "@babel/plugin-proposal-class-static-block" "^7.16.7" + "@babel/plugin-proposal-dynamic-import" "^7.16.7" + "@babel/plugin-proposal-export-namespace-from" "^7.16.7" + "@babel/plugin-proposal-json-strings" "^7.16.7" + "@babel/plugin-proposal-logical-assignment-operators" "^7.16.7" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.7" + "@babel/plugin-proposal-numeric-separator" "^7.16.7" + "@babel/plugin-proposal-object-rest-spread" "^7.16.7" + "@babel/plugin-proposal-optional-catch-binding" "^7.16.7" + "@babel/plugin-proposal-optional-chaining" "^7.16.7" + "@babel/plugin-proposal-private-methods" "^7.16.11" + "@babel/plugin-proposal-private-property-in-object" "^7.16.7" + "@babel/plugin-proposal-unicode-property-regex" "^7.16.7" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + 
"@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.16.7" + "@babel/plugin-transform-async-to-generator" "^7.16.8" + "@babel/plugin-transform-block-scoped-functions" "^7.16.7" + "@babel/plugin-transform-block-scoping" "^7.16.7" + "@babel/plugin-transform-classes" "^7.16.7" + "@babel/plugin-transform-computed-properties" "^7.16.7" + "@babel/plugin-transform-destructuring" "^7.16.7" + "@babel/plugin-transform-dotall-regex" "^7.16.7" + "@babel/plugin-transform-duplicate-keys" "^7.16.7" + "@babel/plugin-transform-exponentiation-operator" "^7.16.7" + "@babel/plugin-transform-for-of" "^7.16.7" + "@babel/plugin-transform-function-name" "^7.16.7" + "@babel/plugin-transform-literals" "^7.16.7" + "@babel/plugin-transform-member-expression-literals" "^7.16.7" + "@babel/plugin-transform-modules-amd" "^7.16.7" + "@babel/plugin-transform-modules-commonjs" "^7.16.8" + "@babel/plugin-transform-modules-systemjs" "^7.16.7" + "@babel/plugin-transform-modules-umd" "^7.16.7" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.16.8" + "@babel/plugin-transform-new-target" "^7.16.7" + "@babel/plugin-transform-object-super" "^7.16.7" + "@babel/plugin-transform-parameters" "^7.16.7" + "@babel/plugin-transform-property-literals" "^7.16.7" + "@babel/plugin-transform-regenerator" "^7.16.7" + "@babel/plugin-transform-reserved-words" "^7.16.7" + "@babel/plugin-transform-shorthand-properties" "^7.16.7" + "@babel/plugin-transform-spread" "^7.16.7" + "@babel/plugin-transform-sticky-regex" "^7.16.7" + "@babel/plugin-transform-template-literals" "^7.16.7" + "@babel/plugin-transform-typeof-symbol" "^7.16.7" + "@babel/plugin-transform-unicode-escapes" "^7.16.7" + "@babel/plugin-transform-unicode-regex" "^7.16.7" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.16.8" + babel-plugin-polyfill-corejs2 "^0.3.0" + babel-plugin-polyfill-corejs3 "^0.5.0" + babel-plugin-polyfill-regenerator "^0.3.0" + core-js-compat "^3.20.2" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0", "@babel/preset-react@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.16.7.tgz#4c18150491edc69c183ff818f9f2aecbe5d93852" + integrity sha512-fWpyI8UM/HE6DfPBzD8LnhQ/OcH8AgTaqcqP2nGOXEUV+VKBR5JRN9hCk9ai+zQQ57vtm9oWeXguBCPNUjytgA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-transform-react-display-name" "^7.16.7" + "@babel/plugin-transform-react-jsx" "^7.16.7" + "@babel/plugin-transform-react-jsx-development" "^7.16.7" + "@babel/plugin-transform-react-pure-annotations" "^7.16.7" + +"@babel/preset-typescript@^7.16.0": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.16.7.tgz#ab114d68bb2020afc069cd51b37ff98a046a70b9" + integrity sha512-WbVEmgXdIyvzB77AQjGBEyYPZx+8tTsO50XtfozQrkW8QB2rLJpH2lgx0TRw5EJrBxOZQ+wCcyPVQvS8tjEHpQ== + 
dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-transform-typescript" "^7.16.7" + +"@babel/runtime-corejs3@^7.10.2": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.16.8.tgz#ea533d96eda6fdc76b1812248e9fbd0c11d4a1a7" + integrity sha512-3fKhuICS1lMz0plI5ktOE/yEtBRMVxplzRkdn6mJQ197XiY0JnrzYV0+Mxozq3JZ8SBV9Ecurmw1XsGbwOf+Sg== + dependencies: + core-js-pure "^3.20.2" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.0.0", "@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.16.7.tgz#03ff99f64106588c9c403c6ecb8c3bafbbdff1fa" + integrity sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.16.7", "@babel/template@^7.3.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" + integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/parser" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.10", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", "@babel/traverse@^7.7.2": + version "7.16.10" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.16.10.tgz#448f940defbe95b5a8029975b051f75993e8239f" + integrity sha512-yzuaYXoRJBGMlBhsMJoUW7G1UmSb/eXr/JHYM/MsOJgavJibLwASijW7oXBdw3NQ6T0bW7Ty5P/VarOs9cHmqw== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.16.8" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.16.10" + "@babel/types" "^7.16.8" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/traverse@^7.17.0", "@babel/traverse@^7.17.3": + version "7.17.3" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.3.tgz#0ae0f15b27d9a92ba1f2263358ea7c4e7db47b57" + integrity sha512-5irClVky7TxRWIRtxlh2WPUUOLhcPN06AGgaQSB8AEwuyEBgJVuJ5imdHm5zxk8w0QS5T+tDfnDxAlhWjpb7cw== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.3" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.17.3" + "@babel/types" "^7.17.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.16.0", "@babel/types@^7.16.7", "@babel/types@^7.16.8", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.16.8.tgz#0ba5da91dd71e0a4e7781a30f22770831062e3c1" + integrity sha512-smN2DQc5s4M7fntyjGtyIPbRJv6wW4rU/94fmYJ7PKQuZkC0qGMHXJbg6sNGt12JmVr4k5YaptI/XtiLJBnmIg== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + to-fast-properties "^2.0.0" + +"@babel/types@^7.17.0": + 
version "7.17.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.17.0.tgz#a826e368bccb6b3d84acd76acad5c0d87342390b" + integrity sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@csstools/normalize.css@*": + version "12.0.0" + resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== + +"@elastic/datemath@^5.0.3": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@elastic/datemath/-/datemath-5.0.3.tgz#7baccdab672b9a3ecb7fe8387580670936b58573" + integrity sha512-8Hbr1Uyjm5OcYBfEB60K7sCP6U3IXuWDaLaQmYv3UxgI4jqBWbakoemwWvsqPVUvnwEjuX6z7ghPZbefs8xiaA== + dependencies: + tslib "^1.9.3" + +"@elastic/eui@^46.1.0": + version "46.1.0" + resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-46.1.0.tgz#b1d77a56d529001858b7fc91f6c2953288549336" + integrity sha512-j65NMzqKluVPtk3b6UYdVVaBha74Y8Y/N6QaHin9MmqUKoAPgevqHWvi0n/OmMpADdRCI0ldxkse6tgAvdaEHQ== + dependencies: + "@types/chroma-js" "^2.0.0" + "@types/lodash" "^4.14.160" + "@types/numeral" "^0.0.28" + "@types/react-beautiful-dnd" "^13.0.0" + "@types/react-input-autosize" "^2.2.0" + "@types/react-virtualized-auto-sizer" "^1.0.0" + "@types/react-window" "^1.8.2" + "@types/refractor" "^3.0.0" + "@types/resize-observer-browser" "^0.1.5" + "@types/vfile-message" "^2.0.0" + chroma-js "^2.1.0" + classnames "^2.2.6" + lodash "^4.17.21" + mdast-util-to-hast "^10.0.0" + numeral "^2.0.6" + prop-types "^15.6.0" + react-beautiful-dnd "^13.0.0" + react-dropzone "^11.2.0" + react-focus-on "^3.5.0" + react-input-autosize "^2.2.2" + react-is "~16.3.0" + react-virtualized-auto-sizer "^1.0.2" + react-window "^1.8.5" + refractor "^3.5.0" + rehype-raw "^5.0.0" + rehype-react "^6.0.0" + rehype-stringify "^8.0.0" + remark-breaks "^2.0.2" + remark-emoji "^2.1.0" + remark-parse "^8.0.3" + remark-rehype "^8.0.0" + tabbable "^3.0.0" + text-diff "^1.0.1" + unified "^9.2.0" + unist-util-visit "^2.0.3" + url-parse "^1.5.3" + uuid "^8.3.0" + vfile "^4.2.0" + +"@emotion/cache@^11.7.1": + version "11.7.1" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.7.1.tgz#08d080e396a42e0037848214e8aa7bf879065539" + integrity sha512-r65Zy4Iljb8oyjtLeCuBH8Qjiy107dOYC6SJq7g7GV5UCQWMObY4SJDPGFjiiVpPrOJ2hmJOoBiYTC7hwx9E2A== + dependencies: + "@emotion/memoize" "^0.7.4" + "@emotion/sheet" "^1.1.0" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + stylis "4.0.13" + +"@emotion/hash@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" + integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== + +"@emotion/memoize@^0.7.4": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" + integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== + +"@emotion/react@^11.7.1": + version 
"11.7.1" + resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.7.1.tgz#3f800ce9b20317c13e77b8489ac4a0b922b2fe07" + integrity sha512-DV2Xe3yhkF1yT4uAUoJcYL1AmrnO5SVsdfvu+fBuS7IbByDeTVx9+wFmvx9Idzv7/78+9Mgx2Hcmr7Fex3tIyw== + dependencies: + "@babel/runtime" "^7.13.10" + "@emotion/cache" "^11.7.1" + "@emotion/serialize" "^1.0.2" + "@emotion/sheet" "^1.1.0" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + hoist-non-react-statics "^3.3.1" + +"@emotion/serialize@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.2.tgz#77cb21a0571c9f68eb66087754a65fa97bfcd965" + integrity sha512-95MgNJ9+/ajxU7QIAruiOAdYNjxZX7G2mhgrtDWswA21VviYIRP1R5QilZ/bDY42xiKsaktP4egJb3QdYQZi1A== + dependencies: + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.4" + "@emotion/unitless" "^0.7.5" + "@emotion/utils" "^1.0.0" + csstype "^3.0.2" + +"@emotion/sheet@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.0.tgz#56d99c41f0a1cda2726a05aa6a20afd4c63e58d2" + integrity sha512-u0AX4aSo25sMAygCuQTzS+HsImZFuS8llY8O7b9MDRzbJM0kVJlAz6KNDqcG7pOuQZJmj/8X/rAW+66kMnMW+g== + +"@emotion/unitless@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + +"@emotion/utils@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.0.0.tgz#abe06a83160b10570816c913990245813a2fd6af" + integrity sha512-mQC2b3XLDs6QCW+pDQDiyO/EdGZYOygE8s5N5rrzjSI4M3IejPE/JPndCBwRT9z982aqQNi6beWs1UeayrQxxA== + +"@emotion/weak-memoize@^0.2.5": + version "0.2.5" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" + integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== + +"@eslint/eslintrc@^1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.0.5.tgz#33f1b838dbf1f923bfa517e008362b78ddbbf318" + integrity sha512-BLxsnmK3KyPunz5wmCCpqy0YelEoxxGmH73Is+Z74oOTMtExcjkr3dDR6quwrjh1YspA8DH9gnX1o069KiS9AQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.2.0" + globals "^13.9.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.9.2": + version "0.9.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.3.tgz#f2564c744b387775b436418491f15fce6601f63e" + integrity sha512-3xSMlXHh03hCcCmFc0rbKp3Ivt2PFEJnQUJDDMTJQ2wkECZWdq4GePs2ctc5H8zV+cHPaq8k2vU8mrQjA6iHdQ== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" 
+ get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.4.6.tgz#0742e6787f682b22bdad56f9db2a8a77f6a86107" + integrity sha512-jauXyacQD33n47A44KrlOVeiXHEXDqapSdfb9kTekOchH/Pd18kBIO1+xxJQRLuG+LUuljFCwTG92ra4NW7SpA== + dependencies: + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^27.4.6" + jest-util "^27.4.2" + slash "^3.0.0" + +"@jest/core@^27.4.7": + version "27.4.7" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.4.7.tgz#84eabdf42a25f1fa138272ed229bcf0a1b5e6913" + integrity sha512-n181PurSJkVMS+kClIFSX/LLvw9ExSb+4IMtD6YnfxZVerw9ANYtW0bPrm0MJu2pfe9SY9FJ9FtQ+MdZkrZwjg== + dependencies: + "@jest/console" "^27.4.6" + "@jest/reporters" "^27.4.6" + "@jest/test-result" "^27.4.6" + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-changed-files "^27.4.2" + jest-config "^27.4.7" + jest-haste-map "^27.4.6" + jest-message-util "^27.4.6" + jest-regex-util "^27.4.0" + jest-resolve "^27.4.6" + jest-resolve-dependencies "^27.4.6" + jest-runner "^27.4.6" + jest-runtime "^27.4.6" + jest-snapshot "^27.4.6" + jest-util "^27.4.2" + jest-validate "^27.4.6" + jest-watcher "^27.4.6" + micromatch "^4.0.4" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.4.6.tgz#1e92885d64f48c8454df35ed9779fbcf31c56d8b" + integrity sha512-E6t+RXPfATEEGVidr84WngLNWZ8ffCPky8RqqRK6u1Bn0LK92INe0MDttyPl/JOzaq92BmDzOeuqk09TvM22Sg== + dependencies: + "@jest/fake-timers" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + jest-mock "^27.4.6" + +"@jest/fake-timers@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.4.6.tgz#e026ae1671316dbd04a56945be2fa251204324e8" + integrity sha512-mfaethuYF8scV8ntPpiVGIHQgS0XIALbpY2jt2l7wb/bvq4Q5pDLk4EP4D7SAvYT1QrPOPVZAtbdGAOOyIgs7A== + dependencies: + "@jest/types" "^27.4.2" + "@sinonjs/fake-timers" "^8.0.1" + "@types/node" "*" + jest-message-util "^27.4.6" + jest-mock "^27.4.6" + jest-util "^27.4.2" + +"@jest/globals@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.4.6.tgz#3f09bed64b0fd7f5f996920258bd4be8f52f060a" + integrity sha512-kAiwMGZ7UxrgPzu8Yv9uvWmXXxsy0GciNejlHvfPIfWkSxChzv6bgTS3YqBkGuHcis+ouMFI2696n2t+XYIeFw== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/types" "^27.4.2" + expect "^27.4.6" + +"@jest/reporters@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.4.6.tgz#b53dec3a93baf9b00826abf95b932de919d6d8dd" + integrity sha512-+Zo9gV81R14+PSq4wzee4GC2mhAN9i9a7qgJWL90Gpx7fHYkWpTBvwWNZUXvJByYR9tAVBdc8VxDWqfJyIUrIQ== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^27.4.6" + "@jest/test-result" "^27.4.6" + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.4" + 
istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-haste-map "^27.4.6" + jest-resolve "^27.4.6" + jest-util "^27.4.2" + jest-worker "^27.4.6" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^8.1.0" + +"@jest/source-map@^27.4.0": + version "27.4.0" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.4.0.tgz#2f0385d0d884fb3e2554e8f71f8fa957af9a74b6" + integrity sha512-Ntjx9jzP26Bvhbm93z/AKcPRj/9wrkI88/gK60glXDx1q+IeI0rf7Lw2c89Ch6ofonB0On/iRDreQuQ6te9pgQ== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.4" + source-map "^0.6.0" + +"@jest/test-result@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.4.6.tgz#b3df94c3d899c040f602cea296979844f61bdf69" + integrity sha512-fi9IGj3fkOrlMmhQqa/t9xum8jaJOOAi/lZlm6JXSc55rJMXKHxNDN1oCP39B0/DhNOa2OMupF9BcKZnNtXMOQ== + dependencies: + "@jest/console" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.4.6.tgz#447339b8a3d7b5436f50934df30854e442a9d904" + integrity sha512-3GL+nsf6E1PsyNsJuvPyIz+DwFuCtBdtvPpm/LMXVkBJbdFvQYCDpccYT56qq5BGniXWlE81n2qk1sdXfZebnw== + dependencies: + "@jest/test-result" "^27.4.6" + graceful-fs "^4.2.4" + jest-haste-map "^27.4.6" + jest-runtime "^27.4.6" + +"@jest/transform@^27.4.6": + version "27.4.6" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.4.6.tgz#153621940b1ed500305eacdb31105d415dc30231" + integrity sha512-9MsufmJC8t5JTpWEQJ0OcOOAXaH5ioaIX6uHVBLBMoCZPfKKQF+EqP8kACAvCZ0Y1h2Zr3uOccg8re+Dr5jxyw== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^27.4.2" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.4" + jest-haste-map "^27.4.6" + jest-regex-util "^27.4.0" + jest-util "^27.4.2" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^27.4.2": + version "27.4.2" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.4.2.tgz#96536ebd34da6392c2b7c7737d693885b5dd44a5" + integrity sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.0.5" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" + integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.11" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" + integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== + +"@jridgewell/trace-mapping@^0.3.0": + version "0.3.4" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.4.tgz#f6a0832dffd5b8a6aaa633b7d9f8e8e94c83a0c3" + integrity 
sha512-vFv9ttIedivx0ux3QSjhgtCVjPZd5l46ZOMDSCwnH1yUO2e964gO8LZGyv2QkqcgR6TnBU1v+1IFqmeoG+0UJQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@mapbox/hast-util-table-cell-style@^0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@mapbox/hast-util-table-cell-style/-/hast-util-table-cell-style-0.2.0.tgz#1003f59d54fae6f638cb5646f52110fb3da95b4d" + integrity sha512-gqaTIGC8My3LVSnU38IwjHVKJC94HSonjvFHDk8/aSrApL8v4uWgm8zJkK7MJIIbHuNOr/+Mv2KkQKcxs6LEZA== + dependencies: + unist-util-visit "^1.4.1" + +"@mswjs/cookies@^0.1.7": + version "0.1.7" + resolved "https://registry.yarnpkg.com/@mswjs/cookies/-/cookies-0.1.7.tgz#d334081b2c51057a61c1dd7b76ca3cac02251651" + integrity sha512-bDg1ReMBx+PYDB4Pk7y1Q07Zz1iKIEUWQpkEXiA2lEWg9gvOZ8UBmGXilCEUvyYoRFlmr/9iXTRR69TrgSwX/Q== + dependencies: + "@types/set-cookie-parser" "^2.4.0" + set-cookie-parser "^2.4.6" + +"@mswjs/interceptors@^0.12.7": + version "0.12.7" + resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.12.7.tgz#0d1cd4cd31a0f663e0455993951201faa09d0909" + integrity sha512-eGjZ3JRAt0Fzi5FgXiV/P3bJGj0NqsN7vBS0J0FO2AQRQ0jCKQS4lEFm4wvlSgKQNfeuc/Vz6d81VtU3Gkx/zg== + dependencies: + "@open-draft/until" "^1.0.3" + "@xmldom/xmldom" "^0.7.2" + debug "^4.3.2" + headers-utils "^3.0.2" + outvariant "^1.2.0" + strict-event-emitter "^0.2.0" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@open-draft/until@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@open-draft/until/-/until-1.0.3.tgz#db9cc719191a62e7d9200f6e7bab21c5b848adca" + integrity sha512-Aq58f5HiWdyDlFffbbSjAlv596h/cOnt2DO1w3DOC7OJ5EHs0hd/nycJfiu9RJbT6Yk6F1knnRRXNSpxoIVZ9Q== + +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.4" + resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.4.tgz#df0d0d855fc527db48aac93c218a0bf4ada41f99" + integrity sha512-zZbZeHQDnoTlt2AF+diQT0wsSXpvWiaIOZwBRdltNFhG1+I3ozyaw7U/nBiUwyJ0D+zwdXp0E3bWOl38Ag2BMw== + dependencies: + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.8.1" + error-stack-parser "^2.0.6" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + source-map "^0.7.3" + +"@rollup/plugin-babel@^5.2.0": + version "5.3.0" + resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.0.tgz#9cb1c5146ddd6a4968ad96f209c50c62f92f9879" + integrity sha512-9uIC8HZOnVLrLHxayq/PTzw+uS25E14KPUBh5ktF+18Mjo5yK0ToMMx6epY0uEgkjwJw0aBW4x2horYXh8juWw== + dependencies: + 
"@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-babel@^5.3.1": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-commonjs@^21.0.2": + version "21.0.2" + resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-21.0.2.tgz#0b9c539aa1837c94abfaf87945838b0fc8564891" + integrity sha512-d/OmjaLVO4j/aQX69bwpWPpbvI3TJkQuxoAk7BH8ew1PyoMBLTOuvJTjzG8oEoW7drIIqB0KCJtfFLu/2GClWg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + commondir "^1.0.1" + estree-walker "^2.0.1" + glob "^7.1.6" + is-reference "^1.2.1" + magic-string "^0.25.7" + resolve "^1.17.0" + +"@rollup/plugin-json@^4.1.0": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@rollup/plugin-json/-/plugin-json-4.1.0.tgz#54e09867ae6963c593844d8bd7a9c718294496f3" + integrity sha512-yfLbTdNS6amI/2OpmbiBoW12vngr5NW2jCJVZSBEz+H5KfUJZ2M7sDjk0U6GOOdCWFVScShte29o9NezJ53TPw== + dependencies: + "@rollup/pluginutils" "^3.0.8" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-node-resolve@^13.1.3": + version "13.1.3" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-13.1.3.tgz#2ed277fb3ad98745424c1d2ba152484508a92d79" + integrity sha512-BdxNk+LtmElRo5d06MGY4zoepyrXX1tkzX2hrnPEZ53k78GuOMWLqmJDGIIOPwVRIFZrLQOo+Yr6KtCuLIA0AQ== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-replace@^2.4.1": + version "2.4.2" + resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + magic-string "^0.25.7" + +"@rollup/plugin-typescript@^8.3.1": + version "8.3.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-typescript/-/plugin-typescript-8.3.1.tgz#b7dc75ed6b4876e260b9e80624fab23bc98e4ac1" + integrity sha512-84rExe3ICUBXzqNX48WZV2Jp3OddjTMX97O2Py6D1KJaGSwWp0mDHXj+bCGNJqWHIEKDIT2U0sDjhP4czKi6cA== + dependencies: + "@rollup/pluginutils" "^3.1.0" + resolve "^1.17.0" + +"@rollup/pluginutils@^3.0.8", "@rollup/pluginutils@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + dependencies: + "@types/estree" "0.0.39" + estree-walker "^1.0.1" + picomatch "^2.2.2" + +"@rollup/pluginutils@^4.1.1": + version "4.1.2" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-4.1.2.tgz#ed5821c15e5e05e32816f5fb9ec607cdf5a75751" 
+ integrity sha512-ROn4qvkxP9SyPeHaf7uQC/GPFY6L/OWy9+bd9AwcjOAWQwxRscoEyAUD8qCY5o5iL4jqQwoLk2kaTKJPb/HwzQ== + dependencies: + estree-walker "^2.0.1" + picomatch "^2.2.2" + +"@rushstack/eslint-patch@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.0.tgz#7f698254aadf921e48dda8c0a6b304026b8a9323" + integrity sha512-JLo+Y592QzIE+q7Dl2pMUtt4q8SKYI5jDrZxrozEQxnGVOyYE+GWK9eLkwTaeN9DDctlaRAQ3TBmzZ1qdLE30A== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== + dependencies: + ejs "^3.1.6" + json5 "^2.2.0" + magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + +"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + 
"@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@testing-library/dom@^8.0.0": + version "8.11.3" + resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.11.3.tgz#38fd63cbfe14557021e88982d931e33fb7c1a808" + integrity sha512-9LId28I+lx70wUiZjLvi1DB/WT2zGOxUh46glrSNMaWVx849kKAluezVzZrXJfTKKoQTmEOutLes/bHg4Bj3aA== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.14.1": + version "5.16.1" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.1.tgz#3db7df5ae97596264a7da9696fe14695ba02e51f" + integrity sha512-ajUJdfDIuTCadB79ukO+0l8O+QwN0LiSxDaYUTI4LndbbUsGi6rWU1SCexXzBA2NSjlVB9/vbkasQIL3tmPBjw== + dependencies: + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css "^3.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^12.0.0": + version "12.1.2" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-12.1.2.tgz#f1bc9a45943461fa2a598bb4597df1ae044cfc76" + integrity sha512-ihQiEOklNyHIpo2Y8FREkyD1QAea054U0MVbwH1m8N9TxeFz+KoJ9LkqoKqJlzx2JDm56DVwaJ1r36JYxZM05g== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.0.0" + +"@testing-library/user-event@^13.2.1": + version "13.5.0" + resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.1.18" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.18.tgz#1a29abcc411a9c05e2094c98f9a1b7da6cdf49f8" + integrity sha512-S7unDjm/C7z2A2R9NzfKCK1I+BAALDtxEmsJBwlB3EzNfb929ykjL++1CK9LO++EIp2fQrC8O+BwjKvz6UeDyQ== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity 
sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.14.2" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.14.2.tgz#ffcd470bbb3f8bf30481678fb5502278ca833a43" + integrity sha512-K2waXdXBi2302XUdcHcR1jCeU0LL4TD9HRs/gk0N2Xvrht+G/BfJa4QObBQZfhMdxiCpV3COl5Nfq4uKTeTnJA== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/chroma-js@^2.0.0": + version "2.1.3" + resolved "https://registry.yarnpkg.com/@types/chroma-js/-/chroma-js-2.1.3.tgz#0b03d737ff28fad10eb884e0c6cedd5ffdc4ba0a" + integrity sha512-1xGPhoSGY1CPmXLCBcjVZSQinFjL26vlR8ZqprsBWiFyED4JacJJ9zHhh5aaUXqbY9B37mKQ73nlydVAXmr1+g== + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/cookie@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d" + integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== + +"@types/d3-array@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.2.tgz#71c35bca8366a40d1b8fce9279afa4a77fb0065d" + integrity sha512-5mjGjz6XOXKOCdTajXTZ/pMsg236RdiwKPrRPWAEf/2S/+PzwY+LLYShUpeysWaMvsdS7LArh6GdUefoxpchsQ== + +"@types/d3-axis@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-axis/-/d3-axis-3.0.1.tgz#6afc20744fa5cc0cbc3e2bd367b140a79ed3e7a8" + integrity sha512-zji/iIbdd49g9WN0aIsGcwcTBUkgLsCSwB+uH+LPVDAiKWENMtI3cJEWt+7/YYwelMoZmbBfzA3qCdrZ2XFNnw== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-brush@*": + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/@types/d3-brush/-/d3-brush-3.0.1.tgz#ae5f17ce391935ca88b29000e60ee20452c6357c" + integrity sha512-B532DozsiTuQMHu2YChdZU0qsFJSio3Q6jmBYGYNp3gMDzBmuFFgPt9qKA4VYuLZMp4qc6eX7IUFUEsvHiXZAw== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-chord@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-chord/-/d3-chord-3.0.1.tgz#54c8856c19c8e4ab36a53f73ba737de4768ad248" + integrity sha512-eQfcxIHrg7V++W8Qxn6QkqBNBokyhdWSAS73AbkbMzvLQmVVBviknoz2SRS/ZJdIOmhcmmdCRE/NFOm28Z1AMw== + +"@types/d3-color@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.0.2.tgz#53f2d6325f66ee79afd707c05ac849e8ae0edbb0" + integrity sha512-WVx6zBiz4sWlboCy7TCgjeyHpNjMsoF36yaagny1uXfbadc9f+5BeBf7U+lRmQqY3EHbGQpP8UdW8AC+cywSwQ== + +"@types/d3-contour@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-contour/-/d3-contour-3.0.1.tgz#9ff4e2fd2a3910de9c5097270a7da8a6ef240017" + integrity sha512-C3zfBrhHZvrpAAK3YXqLWVAGo87A4SvJ83Q/zVJ8rFWJdKejUnDYaWZPkA8K84kb2vDA/g90LTQAz7etXcgoQQ== + dependencies: + "@types/d3-array" "*" + "@types/geojson" "*" + +"@types/d3-delaunay@*": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-delaunay/-/d3-delaunay-6.0.0.tgz#c09953ac7e5460997f693d2d7bf3522e0d4a88e6" + integrity sha512-iGm7ZaGLq11RK3e69VeMM6Oqj2SjKUB9Qhcyd1zIcqn2uE8w9GFB445yCY46NOQO3ByaNyktX1DK+Etz7ZaX+w== + +"@types/d3-dispatch@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-dispatch/-/d3-dispatch-3.0.1.tgz#a1b18ae5fa055a6734cb3bd3cbc6260ef19676e3" + integrity sha512-NhxMn3bAkqhjoxabVJWKryhnZXXYYVQxaBnbANu0O94+O/nX9qSjrA1P1jbAQJxJf+VC72TxDX/YJcKue5bRqw== + +"@types/d3-drag@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-drag/-/d3-drag-3.0.1.tgz#fb1e3d5cceeee4d913caa59dedf55c94cb66e80f" + integrity sha512-o1Va7bLwwk6h03+nSM8dpaGEYnoIG19P0lKqlic8Un36ymh9NSkNFX1yiXMKNMx8rJ0Kfnn2eovuFaL6Jvj0zA== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-dsv@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-dsv/-/d3-dsv-3.0.0.tgz#f3c61fb117bd493ec0e814856feb804a14cfc311" + integrity sha512-o0/7RlMl9p5n6FQDptuJVMxDf/7EDEv2SYEO/CwdG2tr1hTfUVi0Iavkk2ax+VpaQ/1jVhpnj5rq1nj8vwhn2A== + +"@types/d3-ease@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.0.tgz#c29926f8b596f9dadaeca062a32a45365681eae0" + integrity sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA== + +"@types/d3-fetch@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-fetch/-/d3-fetch-3.0.1.tgz#f9fa88b81aa2eea5814f11aec82ecfddbd0b8fe0" + integrity sha512-toZJNOwrOIqz7Oh6Q7l2zkaNfXkfR7mFSJvGvlD/Ciq/+SQ39d5gynHJZ/0fjt83ec3WL7+u3ssqIijQtBISsw== + dependencies: + "@types/d3-dsv" "*" + +"@types/d3-force@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/d3-force/-/d3-force-3.0.3.tgz#76cb20d04ae798afede1ea6e41750763ff5a9c82" + integrity sha512-z8GteGVfkWJMKsx6hwC3SiTSLspL98VNpmvLpEFJQpZPq6xpA1I8HNBDNSpukfK0Vb0l64zGFhzunLgEAcBWSA== + +"@types/d3-format@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.1.tgz#194f1317a499edd7e58766f96735bdc0216bb89d" + integrity sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg== + +"@types/d3-geo@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-geo/-/d3-geo-3.0.2.tgz#e7ec5f484c159b2c404c42d260e6d99d99f45d9a" + 
integrity sha512-DbqK7MLYA8LpyHQfv6Klz0426bQEf7bRTvhMy44sNGVyZoWn//B0c+Qbeg8Osi2Obdc9BLLXYAKpyWege2/7LQ== + dependencies: + "@types/geojson" "*" + +"@types/d3-hierarchy@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-hierarchy/-/d3-hierarchy-3.0.2.tgz#ca63f2f4da15b8f129c5b7dffd71d904cba6aca2" + integrity sha512-+krnrWOZ+aQB6v+E+jEkmkAx9HvsNAD+1LCD0vlBY3t+HwjKnsBFbpVLx6WWzDzCIuiTWdAxXMEnGnVXpB09qQ== + +"@types/d3-interpolate@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz#e7d17fa4a5830ad56fe22ce3b4fac8541a9572dc" + integrity sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw== + dependencies: + "@types/d3-color" "*" + +"@types/d3-path@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.0.0.tgz#939e3a784ae4f80b1fde8098b91af1776ff1312b" + integrity sha512-0g/A+mZXgFkQxN3HniRDbXMN79K3CdTpLsevj+PXiTcb2hVyvkZUBg37StmgCQkaD84cUJ4uaDAWq7UJOQy2Tg== + +"@types/d3-polygon@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-polygon/-/d3-polygon-3.0.0.tgz#5200a3fa793d7736fa104285fa19b0dbc2424b93" + integrity sha512-D49z4DyzTKXM0sGKVqiTDTYr+DHg/uxsiWDAkNrwXYuiZVd9o9wXZIo+YsHkifOiyBkmSWlEngHCQme54/hnHw== + +"@types/d3-quadtree@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-quadtree/-/d3-quadtree-3.0.2.tgz#433112a178eb7df123aab2ce11c67f51cafe8ff5" + integrity sha512-QNcK8Jguvc8lU+4OfeNx+qnVy7c0VrDJ+CCVFS9srBo2GL9Y18CnIxBdTF3v38flrGy5s1YggcoAiu6s4fLQIw== + +"@types/d3-random@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-random/-/d3-random-3.0.1.tgz#5c8d42b36cd4c80b92e5626a252f994ca6bfc953" + integrity sha512-IIE6YTekGczpLYo/HehAy3JGF1ty7+usI97LqraNa8IiDur+L44d0VOjAvFQWJVdZOJHukUJw+ZdZBlgeUsHOQ== + +"@types/d3-scale-chromatic@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#103124777e8cdec85b20b51fd3397c682ee1e954" + integrity sha512-dsoJGEIShosKVRBZB0Vo3C8nqSDqVGujJU6tPznsBJxNJNwMF8utmS83nvCBKQYPpjCzaaHcrf66iTRpZosLPw== + +"@types/d3-scale@*": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.2.tgz#41be241126af4630524ead9cb1008ab2f0f26e69" + integrity sha512-Yk4htunhPAwN0XGlIwArRomOjdoBFXC3+kCxK2Ubg7I9shQlVSJy/pG/Ht5ASN+gdMIalpk8TJ5xV74jFsetLA== + dependencies: + "@types/d3-time" "*" + +"@types/d3-selection@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-selection/-/d3-selection-3.0.2.tgz#23e48a285b24063630bbe312cc0cfe2276de4a59" + integrity sha512-d29EDd0iUBrRoKhPndhDY6U/PYxOWqgIZwKTooy2UkBfU7TNZNpRho0yLWPxlatQrFWk2mnTu71IZQ4+LRgKlQ== + +"@types/d3-shape@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.0.2.tgz#4b1ca4ddaac294e76b712429726d40365cd1e8ca" + integrity sha512-5+ButCmIfNX8id5seZ7jKj3igdcxx+S9IDBiT35fQGTLZUfkFgTv+oBH34xgeoWDKpWcMITSzBILWQtBoN5Piw== + dependencies: + "@types/d3-path" "*" + +"@types/d3-time-format@*": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-4.0.0.tgz#ee7b6e798f8deb2d9640675f8811d0253aaa1946" + integrity sha512-yjfBUe6DJBsDin2BMIulhSHmr5qNR5Pxs17+oW4DoVPyVIXZ+m6bs7j1UVKP08Emv6jRmYrYqxYzO63mQxy1rw== + +"@types/d3-time@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819" + integrity 
sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg== + +"@types/d3-timer@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.0.tgz#e2505f1c21ec08bda8915238e397fb71d2fc54ce" + integrity sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g== + +"@types/d3-transition@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-transition/-/d3-transition-3.0.1.tgz#c9a96125567173d6163a6985b874f79154f4cc3d" + integrity sha512-Sv4qEI9uq3bnZwlOANvYK853zvpdKEm1yz9rcc8ZTsxvRklcs9Fx4YFuGA3gXoQN/c/1T6QkVNjhaRO/cWj94g== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-zoom@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-zoom/-/d3-zoom-3.0.1.tgz#4bfc7e29625c4f79df38e2c36de52ec3e9faf826" + integrity sha512-7s5L9TjfqIYQmQQEUcpMAcBOahem7TRoSO/+Gkz02GbMVuULiZzjF2BOdw291dbO2aNon4m2OdFsRGaCq2caLQ== + dependencies: + "@types/d3-interpolate" "*" + "@types/d3-selection" "*" + +"@types/d3@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@types/d3/-/d3-7.1.0.tgz#8f32a7e7f434d8f920c8b1ebdfed55e18c033720" + integrity sha512-gYWvgeGjEl+zmF8c+U1RNIKqe7sfQwIXeLXO5Os72TjDjCEtgpvGBvZ8dXlAuSS1m6B90Y1Uo6Bm36OGR/OtCA== + dependencies: + "@types/d3-array" "*" + "@types/d3-axis" "*" + "@types/d3-brush" "*" + "@types/d3-chord" "*" + "@types/d3-color" "*" + "@types/d3-contour" "*" + "@types/d3-delaunay" "*" + "@types/d3-dispatch" "*" + "@types/d3-drag" "*" + "@types/d3-dsv" "*" + "@types/d3-ease" "*" + "@types/d3-fetch" "*" + "@types/d3-force" "*" + "@types/d3-format" "*" + "@types/d3-geo" "*" + "@types/d3-hierarchy" "*" + "@types/d3-interpolate" "*" + "@types/d3-path" "*" + "@types/d3-polygon" "*" + "@types/d3-quadtree" "*" + "@types/d3-random" "*" + "@types/d3-scale" "*" + "@types/d3-scale-chromatic" "*" + "@types/d3-selection" "*" + "@types/d3-shape" "*" + "@types/d3-time" "*" + "@types/d3-time-format" "*" + "@types/d3-timer" "*" + "@types/d3-transition" "*" + "@types/d3-zoom" "*" + +"@types/eslint-scope@^3.7.0": + version "3.7.3" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" + integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "8.4.1" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.1.tgz#c48251553e8759db9e656de3efc846954ac32304" + integrity sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/eslint@^7.28.2": + version "7.29.0" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" + integrity sha512-VNcvioYDH8/FxaeTKkM4/TiTwt6pBV9E3OfGmvaw8tPl0rrHCJ4Ll15HRT+pMiFAf/MLQvAzC+6RzUMEL9Ceng== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*", "@types/estree@^0.0.50": + version "0.0.50" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.50.tgz#1e0caa9364d3fccd2931c3ed96fdbeaa5d4cca83" + integrity sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw== + +"@types/estree@0.0.39": + version "0.0.39" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity 
sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.28" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" + integrity sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*": + version "4.17.13" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034" + integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/fs-extra@^8.0.1": + version "8.1.2" + resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-8.1.2.tgz#7125cc2e4bdd9bd2fc83005ffdb1d0ba00cca61f" + integrity sha512-SvSrYXfWSc7R4eqnOzbQF4TZmfpNSM9FrSWLU3EUnWBuyZqNBOrv1B1JA3byUDPUl9z4Ab3jeZG2eDdySlgNMg== + dependencies: + "@types/node" "*" + +"@types/geojson@*": + version "7946.0.8" + resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.8.tgz#30744afdb385e2945e22f3b033f897f76b1f12ca" + integrity sha512-1rkryxURpr6aWP7R786/UQOkJ3PcpQiWkAXBmdWc7ryFWqN6a4xfK7BtjXvFBKO9LjQ+MWQSWxYeZX1OApnArA== + +"@types/glob@^7.1.1": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" + integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/hast@^2.0.0": + version "2.3.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-2.3.4.tgz#8aa5ef92c117d20d974a82bdfb6a648b08c0bafc" + integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g== + dependencies: + "@types/unist" "*" + +"@types/hoist-non-react-statics@^3.3.0": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz#1124aafe5118cb591977aeb1ceaaed1070eb039f" + integrity sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== + dependencies: + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.8" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.8.tgz#968c66903e7e42b483608030ee85800f22d03f55" + integrity sha512-5kPLG5BKpWYkw/LVOGWpiq3nEVqxiN32rTgI53Sk12/xHFQ2rG3ehI9IO+O3W2QoKeyB92dJkoka8SUm6BX1pA== + dependencies: + "@types/node" "*" + +"@types/inquirer@^8.1.3": + version "8.2.0" 
+ resolved "https://registry.yarnpkg.com/@types/inquirer/-/inquirer-8.2.0.tgz#b9566d048f5ff65159f2ed97aff45fe0f00b35ec" + integrity sha512-BNoMetRf3gmkpAlV5we+kxyZTle7YibdOntIZbU5pyIfMdcwy784KfeZDAcuyMznkh5OLa17RVXZOGA5LTlkgQ== + dependencies: + "@types/through" "*" + rxjs "^7.2.0" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*", "@types/jest@^27.0.1": + version "27.4.0" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-27.4.0.tgz#037ab8b872067cae842a320841693080f9cb84ed" + integrity sha512-gHl8XuC1RZ8H2j5sHv/JqsaxXkDDM9iDOgu0Wp8sjs4u/snb2PVehyWXJPr+ORA0RPpgw231mnutWI1+0hgjIQ== + dependencies: + jest-diff "^27.0.0" + pretty-format "^27.0.0" + +"@types/js-levenshtein@^1.1.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@types/js-levenshtein/-/js-levenshtein-1.1.1.tgz#ba05426a43f9e4e30b631941e0aa17bf0c890ed5" + integrity sha512-qC4bCqYGy1y/NP7dDVr7KJarn+PbX1nSpwA7JXdu0HxT3QYjO8MJ+cntENtHFVy2dRAyBV23OZ6MxsW1AM1L8g== + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.9" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" + integrity sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + +"@types/lodash@^4.14.160": + version "4.14.178" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.178.tgz#341f6d2247db528d4a13ddbb374bcdc80406f4f8" + integrity sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw== + +"@types/mdast@^3.0.0": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af" + integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== + dependencies: + "@types/unist" "*" + +"@types/mime@^1": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" + integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + +"@types/minimatch@*": + version "3.0.5" + resolved 
"https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" + integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== + +"@types/node@*": + version "17.0.13" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.13.tgz#5ed7ed7c662948335fcad6c412bb42d99ea754e3" + integrity sha512-Y86MAxASe25hNzlDbsviXl8jQHb0RDvKt4c40ZJQ1Don0AAL0STLZSs4N+6gLEO55pedy7r2cLwS+ZDxPm/2Bw== + +"@types/node@^16.7.13": + version "16.11.21" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.21.tgz#474d7589a30afcf5291f59bd49cca9ad171ffde4" + integrity sha512-Pf8M1XD9i1ksZEcCP8vuSNwooJ/bZapNmIzpmsMaL+jMI+8mEYU3PKvs+xDNuQcJWF/x24WzY4qxLtB0zNow9A== + +"@types/numeral@^0.0.28": + version "0.0.28" + resolved "https://registry.yarnpkg.com/@types/numeral/-/numeral-0.0.28.tgz#e43928f0bda10b169b6f7ecf99e3ddf836b8ebe4" + integrity sha512-Sjsy10w6XFHDktJJdXzBJmoondAKW+LcGpRFH+9+zXEDj0cOH8BxJuZA9vUDSMAzU1YRJlsPKmZEEiTYDlICLw== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/parse5@^5.0.0": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109" + integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== + +"@types/prettier@^2.1.5": + version "2.4.3" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.4.3.tgz#a3c65525b91fca7da00ab1a3ac2b5a2a4afbffbf" + integrity sha512-QzSuZMBuG5u8HqYz01qtMdg/Jfctlnvj1z/lYnIDXs/golxw0fxtRAHd9KrzjR7Yxz1qVeI00o0kiO3PmVdJ9w== + +"@types/prismjs@*": + version "1.26.0" + resolved "https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.0.tgz#a1c3809b0ad61c62cac6d4e0c56d610c910b7654" + integrity sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ== + +"@types/prop-types@*": + version "15.7.4" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.4.tgz#fcf7205c25dff795ee79af1e30da2c9790808f11" + integrity sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-beautiful-dnd@^13.0.0": + version "13.1.2" + resolved "https://registry.yarnpkg.com/@types/react-beautiful-dnd/-/react-beautiful-dnd-13.1.2.tgz#510405abb09f493afdfd898bf83995dc6385c130" + integrity sha512-+OvPkB8CdE/bGdXKyIhc/Lm2U7UAYCCJgsqmopFmh9gbAudmslkI8eOrPDjg4JhwSE6wytz4a3/wRjKtovHVJg== + dependencies: + "@types/react" "*" + 
+"@types/react-dom@^17.0.9": + version "17.0.11" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.11.tgz#e1eadc3c5e86bdb5f7684e00274ae228e7bcc466" + integrity sha512-f96K3k+24RaLGVu/Y2Ng3e1EbZ8/cVJvypZWd7cy0ofCBaf2lcM46xNhycMZ2xGwbBjRql7hOlZ+e2WlJ5MH3Q== + dependencies: + "@types/react" "*" + +"@types/react-input-autosize@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@types/react-input-autosize/-/react-input-autosize-2.2.1.tgz#6a335212e7fce1e1a4da56ae2095c8c5c35fbfe6" + integrity sha512-RxzEjd4gbLAAdLQ92Q68/AC+TfsAKTc4evsArUH1aIShIMqQMIMjsxoSnwyjtbFTO/AGIW/RQI94XSdvOxCz/w== + dependencies: + "@types/react" "*" + +"@types/react-redux@^7.1.20": + version "7.1.22" + resolved "https://registry.yarnpkg.com/@types/react-redux/-/react-redux-7.1.22.tgz#0eab76a37ef477cc4b53665aeaf29cb60631b72a" + integrity sha512-GxIA1kM7ClU73I6wg9IRTVwSO9GS+SAKZKe0Enj+82HMU6aoESFU2HNAdNi3+J53IaOHPiUfT3kSG4L828joDQ== + dependencies: + "@types/hoist-non-react-statics" "^3.3.0" + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + redux "^4.0.0" + +"@types/react-virtualized-auto-sizer@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.1.tgz#b3187dae1dfc4c15880c9cfc5b45f2719ea6ebd4" + integrity sha512-GH8sAnBEM5GV9LTeiz56r4ZhMOUSrP43tAQNSRVxNexDjcNKLCEtnxusAItg1owFUFE6k0NslV26gqVClVvong== + dependencies: + "@types/react" "*" + +"@types/react-window@^1.8.2": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@types/react-window/-/react-window-1.8.5.tgz#285fcc5cea703eef78d90f499e1457e9b5c02fc1" + integrity sha512-V9q3CvhC9Jk9bWBOysPGaWy/Z0lxYcTXLtLipkt2cnRj1JOSFNF7wqGpkScSXMgBwC+fnVRg/7shwgddBG5ICw== + dependencies: + "@types/react" "*" + +"@types/react@*", "@types/react@^17.0.20": + version "17.0.38" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.38.tgz#f24249fefd89357d5fa71f739a686b8d7c7202bd" + integrity sha512-SI92X1IA+FMnP3qM5m4QReluXzhcmovhZnLNm3pyeQlooi02qI7sLiepEYqT678uNiyc25XfCqxREFpy3W7YhQ== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/refractor@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/refractor/-/refractor-3.0.2.tgz#2d42128d59f78f84d2c799ffc5ab5cadbcba2d82" + integrity sha512-2HMXuwGuOqzUG+KUTm9GDJCHl0LCBKsB5cg28ujEmVi/0qgTb6jOmkVSO5K48qXksyl2Fr3C0Q2VrgD4zbwyXg== + dependencies: + "@types/prismjs" "*" + +"@types/resize-observer-browser@^0.1.5": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@types/resize-observer-browser/-/resize-observer-browser-0.1.6.tgz#d8e6c2f830e2650dc06fe74464472ff64b54a302" + integrity sha512-61IfTac0s9jvNtBCpyo86QeaN8qqpMGHdK0uGKCCIy2dt5/Yk84VduHIdWAcmkC5QvdkPL0p5eWYgUZtHKKUVg== + +"@types/resolve@1.17.1": + version "1.17.1" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== + dependencies: + "@types/node" "*" + +"@types/retry@^0.12.0": + version "0.12.1" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.1.tgz#d8f1c0d0dc23afad6dc16a9e993a0865774b4065" + integrity sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g== + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity 
sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*": + version "1.13.10" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.10.tgz#f5e0ce8797d2d7cc5ebeda48a52c96c4fa47a8d9" + integrity sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ== + dependencies: + "@types/mime" "^1" + "@types/node" "*" + +"@types/set-cookie-parser@^2.4.0": + version "2.4.2" + resolved "https://registry.yarnpkg.com/@types/set-cookie-parser/-/set-cookie-parser-2.4.2.tgz#b6a955219b54151bfebd4521170723df5e13caad" + integrity sha512-fBZgytwhYAUkj/jC/FAV4RQ5EerRup1YQsXQCh8rZfiHkc4UahC192oH0smGwsXol3cL3A5oETuAHeQHmhXM4w== + dependencies: + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.2" + resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.2.tgz#564fb2b2dc827147e937a75b639a05d17ce18b44" + integrity sha512-vehbtyHUShPxIa9SioxDwCvgxukDMH//icJG90sXQBUm5lJOHLT5kNeU9tnivhnA/TkOFMzGIXN2cTc4hY8/kg== + dependencies: + "@types/jest" "*" + +"@types/through@*": + version "0.0.30" + resolved "https://registry.yarnpkg.com/@types/through/-/through-0.0.30.tgz#e0e42ce77e897bd6aead6f6ea62aeb135b8a3895" + integrity sha512-FvnCJljyxhPM3gkRgWmxmDZyAQSiBQQWLI0A0VFL0K7W1oRUrPJSqNO0NvTnLkBcotdlp3lKvaT0JrnyRDkzOg== + dependencies: + "@types/node" "*" + +"@types/trusted-types@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" + integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== + +"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== + +"@types/vfile-message@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/vfile-message/-/vfile-message-2.0.0.tgz#690e46af0fdfc1f9faae00cd049cc888957927d5" + integrity sha512-GpTIuDpb9u4zIO165fUy9+fXcULdD8HFRNli04GehoMVbeNq7D6OBnqSmg3lxZnC+UvgUhEWKxdKiwYUkGltIw== + dependencies: + vfile-message "*" + +"@types/ws@^8.2.2": + version "8.2.2" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.2.2.tgz#7c5be4decb19500ae6b3d563043cd407bf366c21" + integrity 
sha512-NOn5eIcgWLOo6qW8AcuLZ7G8PycXu0xTxxkS6Q18VWFxgPUSOwV0pBj2a/4viNZVu25i7RIB7GttdkAIUUXOOg== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "20.2.1" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.1.tgz#3b9ce2489919d9e4fea439b76916abc34b2df129" + integrity sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw== + +"@types/yargs@^16.0.0": + version "16.0.4" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" + integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^5.5.0": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.10.1.tgz#870195d0f2146b36d11fc71131b75aba52354c69" + integrity sha512-xN3CYqFlyE/qOcy978/L0xLR2HlcAGIyIK5sMOasxaaAPfQRj/MmMV6OC3I7NZO84oEUdWCOju34Z9W8E0pFDQ== + dependencies: + "@typescript-eslint/scope-manager" "5.10.1" + "@typescript-eslint/type-utils" "5.10.1" + "@typescript-eslint/utils" "5.10.1" + debug "^4.3.2" + functional-red-black-tree "^1.0.1" + ignore "^5.1.8" + regexpp "^3.2.0" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@^5.0.0", "@typescript-eslint/experimental-utils@^5.9.0": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.10.1.tgz#49fa5a7800ed08ea70aef14fccb14fbae85116ab" + integrity sha512-Ryeb8nkJa/1zKl8iujNtJC8tgj6PgaY0sDUnrTqbmC70nrKKkZaHfiRDTcqICmCSCEQyLQcJAoh0AukLaIaGTw== + dependencies: + "@typescript-eslint/utils" "5.10.1" + +"@typescript-eslint/parser@^5.5.0": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.10.1.tgz#4ce9633cc33fc70bc13786cb793c1a76fe5ad6bd" + integrity sha512-GReo3tjNBwR5RnRO0K2wDIDN31cM3MmDtgyQ85oAxAmC5K3j/g85IjP+cDfcqDsDDBf1HNKQAD0WqOYL8jXqUA== + dependencies: + "@typescript-eslint/scope-manager" "5.10.1" + "@typescript-eslint/types" "5.10.1" + "@typescript-eslint/typescript-estree" "5.10.1" + debug "^4.3.2" + +"@typescript-eslint/scope-manager@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.10.1.tgz#f0539c73804d2423506db2475352a4dec36cd809" + integrity sha512-Lyvi559Gvpn94k7+ElXNMEnXu/iundV5uFmCUNnftbFrUbAJ1WBoaGgkbOBm07jVZa682oaBU37ao/NGGX4ZDg== + dependencies: + "@typescript-eslint/types" "5.10.1" + "@typescript-eslint/visitor-keys" "5.10.1" + +"@typescript-eslint/type-utils@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.10.1.tgz#5e526c00142585e40ab1503e83f1ff608c367405" + integrity sha512-AfVJkV8uck/UIoDqhu+ptEdBoQATON9GXnhOpPLzkQRJcSChkvD//qsz9JVffl2goxX+ybs5klvacE9vmrQyCw== + dependencies: + "@typescript-eslint/utils" "5.10.1" + debug "^4.3.2" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.10.1.tgz#dca9bd4cb8c067fc85304a31f38ec4766ba2d1ea" + integrity sha512-ZvxQ2QMy49bIIBpTqFiOenucqUyjTQ0WNLhBM6X1fh1NNlYAC6Kxsx8bRTY3jdYsYg44a0Z/uEgQkohbR0H87Q== + +"@typescript-eslint/typescript-estree@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.10.1.tgz#b268e67be0553f8790ba3fe87113282977adda15" + integrity 
sha512-PwIGnH7jIueXv4opcwEbVGDATjGPO1dx9RkUl5LlHDSe+FXxPwFL5W/qYd5/NHr7f6lo/vvTrAzd0KlQtRusJQ== + dependencies: + "@typescript-eslint/types" "5.10.1" + "@typescript-eslint/visitor-keys" "5.10.1" + debug "^4.3.2" + globby "^11.0.4" + is-glob "^4.0.3" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.10.1.tgz#fa682a33af47080ba2c4368ee0ad2128213a1196" + integrity sha512-RRmlITiUbLuTRtn/gcPRi4202niF+q7ylFLCKu4c+O/PcpRvZ/nAUwQ2G00bZgpWkhrNLNnvhZLbDn8Ml0qsQw== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.10.1" + "@typescript-eslint/types" "5.10.1" + "@typescript-eslint/typescript-estree" "5.10.1" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.10.1": + version "5.10.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.10.1.tgz#29102de692f59d7d34ecc457ed59ab5fc558010b" + integrity sha512-NjQ0Xinhy9IL979tpoTRuLKxMc0zJC7QVSdeerXs2/QvOy2yRkzX5dRb10X5woNUdJgU8G3nYRDlI33sq1K4YQ== + dependencies: + "@typescript-eslint/types" "5.10.1" + eslint-visitor-keys "^3.0.0" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version 
"1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@xmldom/xmldom@^0.7.2": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.7.5.tgz#09fa51e356d07d0be200642b0e4f91d8e6dd408d" + integrity sha512-V3BIhmY36fXZ1OtVcI9W+FxQqxVLsPKcNjWigIaa81dLC9IolJl5Mt4Cvhmr0flUnjSpTdrbMTSbXqYqV5dT6A== + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" + integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: + version "1.3.7" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" + integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + dependencies: + mime-types "~2.1.24" + negotiator "0.6.2" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-node@^1.6.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" + integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== + dependencies: + acorn "^7.0.0" + acorn-walk "^7.0.0" + xtend "^4.0.2" + +acorn-walk@^7.0.0, acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.0.0, acorn@^7.1.1: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.4.1, 
acorn@^8.7.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" + integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== + +address@^1.0.1, address@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" + integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== + +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== + dependencies: + loader-utils "^2.0.0" + regex-parser "^2.2.11" + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: + version "8.9.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.9.0.tgz#738019146638824dea25edcf299dcba1b0e7eb18" + integrity sha512-qOKJyNj/h+OWx7s5DePL6Zu1KeM9jPZhwBqs+7DzP6bGOvqzVCSf0xueYmVuaC/oQ/VtS2zLMLHdQFbkka+XDQ== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest 
"^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.1.tgz#eb0c9a8f77786cad2af8ff2b862899842d7b6adb" + integrity sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-hidden@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/aria-hidden/-/aria-hidden-1.1.3.tgz#bb48de18dc84787a3c6eee113709c473c64ec254" + integrity sha512-RhVWFtKH5BiGMycI72q2RAFMLQi8JP9bLuQXgR5a8Znp7P5KOIADSJeyfI8PCVxLEp067B2HbP5JIiI/PXIZeA== + dependencies: + tslib "^1.0.0" + +aria-query@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +aria-query@^5.0.0: + 
version "5.0.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.0.0.tgz#210c21aaf469613ee8c9a62c7f86525e058db52c" + integrity sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= + +array-flatten@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.3, array-includes@^3.1.4: + version "3.1.4" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.4.tgz#f5b493162c760f3539631f005ba2bb46acb45ba9" + integrity sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz#07e0975d84bbc7c48cd1879d609e682598d33e13" + integrity sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + +array.prototype.flatmap@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.5.tgz#908dc82d8a406930fdf38598d51e7411d18d4446" + integrity sha512-08u6rVyi1Lj7oqWbS9nUxliETrtIROT4XGTA4D/LWGten6E3ocm7cy9SIrmNHOL5XVbVuckUp3X6Xyg8/zpvHA== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.19.0" + +asap@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= + +async@0.9.x: + version "0.9.2" + resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" + integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= + +async@^2.6.2: + version "2.6.3" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" + integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== + dependencies: + lodash "^4.17.14" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + 
+atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +attr-accept@^2.2.1: + version "2.2.2" + resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b" + integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg== + +autoprefixer@^10.4.2: + version "10.4.2" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.2.tgz#25e1df09a31a9fba5c40b578936b90d35c9d4d3b" + integrity sha512-9fOPpHKuDW1w/0EKfRmVnxTDt8166MAnLI3mgZ1JCnhNtYWxcJ6Ud5CO/AVOZi/AvFa8DY9RTy3h3+tFBlrrdQ== + dependencies: + browserslist "^4.19.1" + caniuse-lite "^1.0.30001297" + fraction.js "^4.1.2" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axe-core@^4.3.5: + version "4.3.5" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.3.5.tgz#78d6911ba317a8262bfee292aeafcc1e04b49cc5" + integrity sha512-WKTW1+xAzhMS5dJsxWkliixlO/PqC4VhmO9T4juNYcaTg9jzWiJsou6m5pxWYGfigWbwzJWeFY6z47a+4neRXA== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +babel-jest@^27.4.2, babel-jest@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.4.6.tgz#4d024e69e241cdf4f396e453a07100f44f7ce314" + integrity sha512-qZL0JT0HS1L+lOuH+xC2DVASR3nunZi/ozGhpgauJHgmI7f8rudxf6hUjEHympdQ/J64CdKmPkgfJ+A3U6QCrg== + dependencies: + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.4.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + slash "^3.0.0" + +babel-loader@^8.2.3: + version "8.2.3" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.3.tgz#8986b40f1a64cacfcb4b8429320085ef68b1342d" + integrity sha512-n4Zeta8NC3QAsuyiizu0GkmRcQ6clkV9WFUnUf1iXP//IeSKbWjofW3UHyZVwlOB4y039YQKefawyTn64Zwbuw== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^1.4.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.4.0.tgz#d7831fc0f93573788d80dee7e682482da4c730d6" + integrity 
sha512-Jcu7qS4OX5kTWBc45Hz7BMmgXuJqRnhatqpUhnzGC3OBYpOmf2tv6jFNwZpwM7wU7MUuv2r9IPS/ZlYOuburVw== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-macros@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== + dependencies: + "@babel/runtime" "^7.12.5" + cosmiconfig "^7.0.0" + resolve "^1.19.0" + +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== + +babel-plugin-polyfill-corejs2@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz#440f1b70ccfaabc6b676d196239b138f8a2cfba5" + integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w== + dependencies: + "@babel/compat-data" "^7.13.11" + "@babel/helper-define-polyfill-provider" "^0.3.1" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.1.tgz#d66183bf10976ea677f4149a7fcc4d8df43d4060" + integrity sha512-TihqEe4sQcb/QcPJvxe94/9RZuLQuF1+To4WqQcRvc+3J3gLCPIPgDKzGLG6zmQLfH3nn25heRuDNkS2KR4I8A== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.1" + core-js-compat "^3.20.0" + +babel-plugin-polyfill-regenerator@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz#2c0678ea47c75c8cc2fbb1852278d8fb68233990" + integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.1" + +babel-plugin-transform-react-remove-prop-types@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + 
+babel-preset-jest@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.4.0.tgz#70d0e676a282ccb200fbabd7f415db5fdf393bca" + integrity sha512-NK4jGYpnBvNxcGo7/ZpZJr51jCGT+3bwwpVIDY2oNfTxJJldRtB4VAcYdgp1loDE50ODuTu+yBjpMAswv5tlpg== + dependencies: + babel-plugin-jest-hoist "^27.4.0" + babel-preset-current-node-syntax "^1.0.0" + +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" "^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" + +bail@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" + integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +batch@0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= + +bfj@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== + dependencies: + bluebird "^3.5.5" + check-types "^11.1.1" + hoopy "^0.1.4" + tryer "^1.0.1" + +big-integer@^1.6.16: + version "1.6.51" + resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" + integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity 
sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +bluebird@^3.5.5: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.19.1: + version "1.19.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.1.tgz#1499abbaa9274af3ecc9f6f10396c995943e31d4" + integrity sha512-8ljfQi5eBk8EJfECMrgqNGWPEY5jWP+1IzkzkGdFFEwFQZZyaZ21UqdaHktgiMlH0xLHqIFtE/u2OYE5dOtViA== + dependencies: + bytes "3.1.1" + content-type "~1.0.4" + debug "2.6.9" + depd "~1.1.2" + http-errors "1.8.1" + iconv-lite "0.4.24" + on-finished "~2.3.0" + qs "6.9.6" + raw-body "2.4.2" + type-is "~1.6.18" + +bonjour@^3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" + integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU= + dependencies: + array-flatten "^2.1.0" + deep-equal "^1.0.1" + dns-equal "^1.0.0" + dns-txt "^2.0.2" + multicast-dns "^6.0.1" + multicast-dns-service-types "^1.1.0" + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.1, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +broadcast-channel@^3.4.1: + version "3.7.0" + resolved "https://registry.yarnpkg.com/broadcast-channel/-/broadcast-channel-3.7.0.tgz#2dfa5c7b4289547ac3f6705f9c00af8723889937" + integrity sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg== + dependencies: + "@babel/runtime" "^7.7.2" + detect-node "^2.1.0" + js-sha3 "0.8.0" + microseconds "0.2.0" + nano-time "1.0.0" + oblivious-set "1.0.0" + rimraf "3.0.2" + unload "2.2.0" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1: + version "4.19.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.19.1.tgz#4ac0435b35ab655896c31d53018b6dd5e9e4c9a3" + integrity sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A== + dependencies: + 
caniuse-lite "^1.0.30001286" + electron-to-chromium "^1.4.17" + escalade "^3.1.1" + node-releases "^2.0.1" + picocolors "^1.0.0" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +buffer-indexof@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" + integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +builtin-modules@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" + integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= + +bytes@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.1.tgz#3f018291cb4cbad9accb6e6970bca9c8889e879a" + integrity sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001286, caniuse-lite@^1.0.30001297, caniuse-lite@^1.0.30001299: + version "1.0.30001303" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001303.tgz#9b168e4f43ccfc372b86f4bc5a551d9b909c95c9" + integrity sha512-/Mqc1oESndUNszJP0kx0UaQU9kEv9nNtJ7Kn8AdA0mNnH8eR1cj0kG+NbNuC1Wq/b21eA8prhKRA3bbkjONegQ== + +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + +ccount@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" + integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== + +chalk@4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" + integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +char-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.0.tgz#16f98f3f874edceddd300fda5d58df380a7641a6" + integrity sha512-oGu2QekBMXgyQNWPDRQ001bjvDnZe4/zBTz37TMbiKz1NbNiyiH5hRkobe7npRN6GfbGbxMYFck/vQ1r9c1VMA== + +character-entities-html4@^1.0.0: + version "1.1.4" + resolved 
"https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-1.1.4.tgz#0e64b0a3753ddbf1fdc044c5fd01d0199a02e125" + integrity sha512-HRcDxZuZqMx3/a+qrzxdBKBPUpxWEq9xw2OPZ3a/174ihfrQKVsFhqtthBInFy1zZ9GgZyFXOatNujm8M+El3g== + +character-entities-legacy@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1" + integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== + +character-entities@^1.0.0: + version "1.2.4" + resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-1.2.4.tgz#e12c3939b7eaf4e5b15e7ad4c5e28e1d48c5b16b" + integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== + +character-reference-invalid@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560" + integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== + +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + +check-types@^11.1.1: + version "11.1.2" + resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" + integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== + +chokidar@^3.4.2, chokidar@^3.5.2, chokidar@^3.5.3: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chroma-js@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/chroma-js/-/chroma-js-2.3.0.tgz#fdf68a18d8f97ce006422338044218ccbffb9ecb" + integrity sha512-dRgAp9FHHy+VfE7e3/I5HHU0+zZlUHBODcjvXUXinsR/NnHCO+kfv68ofzrAqFI80q2IWvDSAmHAqHh93TGgKg== + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.0.tgz#b4ed1fb6818dea4803a55c623041f9165d2066b2" + integrity sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +classnames@^2.2.6: + version "2.3.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" + integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== + 
+clean-css@^5.2.2: + version "5.2.4" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.2.4.tgz#982b058f8581adb2ae062520808fb2429bd487a4" + integrity sha512-nKseG8wCzEuji/4yrgM/5cthL9oTDc5UOQyFMvW/Q53oP6gLH690o1NbuTh6Y18nujr7BxlsFuS7gXLnLzKJGg== + dependencies: + source-map "~0.6.0" + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + +cli-spinners@^2.5.0: + version "2.6.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" + integrity sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g== + +cli-width@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= + +coa@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collapse-white-space@^1.0.2: + version "1.0.6" + resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" + integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.2" + resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.2.tgz#25e2bacbbaa65991422c07ea209e2089428effb1" + integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ== + +colorette@^1.1.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.4.0.tgz#5190fbb87276259a86ad700bff2c6d6faa3fca40" + integrity sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g== + +colorette@^2.0.10: + version "2.0.16" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.16.tgz#713b9af84fdb000139f04546bd4a93f62a5085da" + integrity sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +comma-separated-tokens@^1.0.0: + version "1.0.8" + resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea" + integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== + +commander@7, commander@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^2.20.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + +common-tags@^1.8.0: + version "1.8.2" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity 
sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +connect-history-api-fallback@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" + integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= + +cookie@0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.1.tgz#afd713fe26ebd21ba95ceb61f9a8116e50a537d1" + integrity sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA== + +cookie@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + +core-js-compat@^3.20.0, core-js-compat@^3.20.2: + version "3.20.3" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.20.3.tgz#d71f85f94eb5e4bea3407412e549daa083d23bd6" + integrity 
sha512-c8M5h0IkNZ+I92QhIpuSijOxGAcj3lgpsWdkCqmUTZNwidujF4r3pi6x1DCN+Vcs5qTS2XWWMfWSuCqyupX8gw== + dependencies: + browserslist "^4.19.1" + semver "7.0.0" + +core-js-pure@^3.20.2, core-js-pure@^3.8.1: + version "3.20.3" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.20.3.tgz#6cc4f36da06c61d95254efc54024fe4797fd5d02" + integrity sha512-Q2H6tQ5MtPtcC7f3HxJ48i4Q7T9ybPKgvWyuH7JXIoNa2pm0KuBnycsET/qw1SLLZYfbsbrZQNMeIOClb+6WIA== + +core-js@^3.19.2: + version "3.20.3" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.20.3.tgz#c710d0a676e684522f3db4ee84e5e18a9d11d69a" + integrity sha512-vVl8j8ph6tRS3B8qir40H7yw7voy17xL0piAjlbBUsH7WIfzoedL/ZOr1OV9FyZQLWXsayOJyV4tnRyXR85/ag== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-blank-pseudo@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.2.tgz#f8660f6a48b17888a9277e53f25cc5abec1f0169" + integrity sha512-hOb1LFjRR+8ocA071xUSmg5VslJ8NGo/I2qpUpdeAYyBVCgupS5O8SEVo4SxEMYyFBNodBkzG3T1iqW9HCXxew== + dependencies: + postcss-selector-parser "^6.0.8" + +css-box-model@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1" + integrity sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw== + dependencies: + tiny-invariant "^1.0.6" + +css-declaration-sorter@^6.0.3: + version "6.1.4" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.1.4.tgz#b9bfb4ed9a41f8dcca9bf7184d849ea94a8294b4" + integrity sha512-lpfkqS0fctcmZotJGhnxkIyJWvBXgpyi2wsFd4J8VB7wzyrT6Ch/3Q+FMNJpjK4gu1+GN5khOnpU2ZVKrLbhCw== + dependencies: + timsort "^0.3.0" + +css-has-pseudo@^3.0.3: + version "3.0.3" + resolved 
"https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.3.tgz#4824a34cb92dae7e09ea1d3fd19691b653412098" + integrity sha512-0gDYWEKaGacwxCqvQ3Ypg6wGdD1AztbMm5h1JsactG2hP2eiflj808QITmuWBpE7sjSEVrAlZhPTVd/nNMj/hQ== + dependencies: + postcss-selector-parser "^6.0.8" + +css-loader@^6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.5.1.tgz#0c43d4fbe0d97f699c91e9818cb585759091d1b1" + integrity sha512-gEy2w9AnJNnD9Kuo4XAP9VflW/ujKoS9c/syO+uWMlm5igc7LysKzPXaDoR2vroROkSwsTS2tGr1yGGEbZOYZQ== + dependencies: + icss-utils "^5.1.0" + postcss "^8.2.15" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.1.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-prefers-color-scheme@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.2.tgz#d5c03a980caab92d8beeee176a8795d331e0c727" + integrity sha512-gv0KQBEM+q/XdoKyznovq3KW7ocO7k+FhPP+hQR1MenJdu0uPGS6IZa9PzlbqBeS6XcZJNAoqoFxlAUW461CrA== + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.2.1" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.2.1.tgz#9e665d6ae4c7f9d65dbe69d0316e3221fb274cdd" + integrity sha512-/aUslKhzkTNCQUB2qTX84lVmfia9NyjP3WpDGtj/WxhwBzWBYUV3DgUpurHTme8UTPcPlAD1DJ+b0nN/t50zDQ== + dependencies: + boolbase "^1.0.0" + css-what "^5.1.0" + domhandler "^4.3.0" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.29: + version "1.0.0-alpha.29" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.29.tgz#3fa9d4ef3142cbd1c301e7664c1f352bd82f5a39" + integrity sha512-sRNb1XydwkW9IOci6iB2xmy8IGCj6r/fr+JWitvJ2JxQRPzN3T4AGGVWCMlVmVwM1gtgALJRmGIlWv5ppnGGkg== + dependencies: + mdn-data "~1.1.0" + source-map "^0.5.3" + +css-tree@1.0.0-alpha.33: + version "1.0.0-alpha.33" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.33.tgz#970e20e5a91f7a378ddd0fc58d0b6c8d4f3be93e" + integrity sha512-SPt57bh5nQnpsTBsx/IXbO14sRc9xXu5MtMAVuo0BaQQmyf0NupNPPSoMaqiAF5tDFafYsTkfeH4Q/HCKXkg4w== + dependencies: + mdn-data "2.0.4" + source-map "^0.5.3" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity 
sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-5.1.0.tgz#3f7b707aadf633baf62c2ceb8579b545bb40f7fe" + integrity sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= + +css@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" + integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== + dependencies: + inherits "^2.0.4" + source-map "^0.6.1" + source-map-resolve "^0.6.0" + +cssdb@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-5.1.0.tgz#ec728d5f5c0811debd0820cbebda505d43003400" + integrity sha512-/vqjXhv1x9eGkE/zO6o8ZOI7dgdZbLVLUGyVRbPgk6YipXbW87YzUCcO+Jrmi5bwJlAH6oD+MNeZyRgXea1GZw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^5.1.11: + version "5.1.11" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.1.11.tgz#db10fb1ecee310e8285c5aca45bd8237be206828" + integrity sha512-ETet5hqHxmzQq2ynXMOQofKuLm7VOjMiOB7E2zdtm/hSeCKlD9fabzIUV4GoPcRyJRHi+4kGf0vsfGYbQ4nmPw== + dependencies: + css-declaration-sorter "^6.0.3" + cssnano-utils "^3.0.1" + postcss-calc "^8.2.0" + postcss-colormin "^5.2.4" + postcss-convert-values "^5.0.3" + postcss-discard-comments "^5.0.2" + postcss-discard-duplicates "^5.0.2" + postcss-discard-empty "^5.0.2" + postcss-discard-overridden "^5.0.3" + postcss-merge-longhand "^5.0.5" + postcss-merge-rules "^5.0.5" + postcss-minify-font-values "^5.0.3" + postcss-minify-gradients "^5.0.5" + postcss-minify-params "^5.0.4" + postcss-minify-selectors "^5.1.2" + postcss-normalize-charset "^5.0.2" + postcss-normalize-display-values "^5.0.2" + postcss-normalize-positions "^5.0.3" + postcss-normalize-repeat-style "^5.0.3" + postcss-normalize-string "^5.0.3" + postcss-normalize-timing-functions "^5.0.2" + postcss-normalize-unicode "^5.0.3" + postcss-normalize-url "^5.0.4" + postcss-normalize-whitespace "^5.0.3" + postcss-ordered-values "^5.0.4" + postcss-reduce-initial "^5.0.2" + postcss-reduce-transforms "^5.0.3" + postcss-svgo "^5.0.3" + postcss-unique-selectors "^5.0.3" + +cssnano-utils@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.0.1.tgz#d3cc0a142d3d217f8736837ec0a2ccff6a89c6ea" + 
integrity sha512-VNCHL364lh++/ono+S3j9NlUK+d97KNkxI77NlqZU2W3xd2/qmyN61dsa47pTpb55zuU4G4lI7qFjAXZJH1OAQ== + +cssnano@^5.0.6: + version "5.0.16" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.0.16.tgz#4ee97d30411693f3de24cef70b36f7ae2a843e04" + integrity sha512-ryhRI9/B9VFCwPbb1z60LLK5/ldoExi7nwdnJzpkLZkm2/r7j2X3jfY+ZvDVJhC/0fPZlrAguYdHNFg0iglPKQ== + dependencies: + cssnano-preset-default "^5.1.11" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^3.5.1: + version "3.5.1" + resolved "https://registry.yarnpkg.com/csso/-/csso-3.5.1.tgz#7b9eb8be61628973c1b261e169d2f024008e758b" + integrity sha512-vrqULLffYU1Q2tLdJvaCYbONStnfkfimRxXNaGjxMldI0C7JPBC4rB1RyjhfdZ4m1frm8pM9uRPKH3d2knZ8gg== + dependencies: + css-tree "1.0.0-alpha.29" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.0.10" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.10.tgz#2ad3a7bed70f35b965707c092e5f30b327c290e5" + integrity sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA== + +"d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: + version "3.1.1" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.1.1.tgz#7797eb53ead6b9083c75a45a681e93fc41bc468c" + integrity sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ== + dependencies: + internmap "1 - 2" + +d3-axis@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-3.0.0.tgz#c42a4a13e8131d637b745fc2973824cfeaf93322" + integrity sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw== + +d3-brush@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-3.0.0.tgz#6f767c4ed8dcb79de7ede3e1c0f89e63ef64d31c" + integrity sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ== + dependencies: + d3-dispatch "1 - 3" + d3-drag "2 - 3" + d3-interpolate "1 - 3" + d3-selection "3" + d3-transition "3" + +d3-chord@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-3.0.1.tgz#d156d61f485fce8327e6abf339cb41d8cbba6966" + integrity sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g== + dependencies: + d3-path "1 - 3" + +"d3-color@1 - 3", d3-color@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.0.1.tgz#03316e595955d1fcd39d9f3610ad41bb90194d0a" + integrity 
sha512-6/SlHkDOBLyQSJ1j1Ghs82OIUXpKWlR0hCsw0XrLSQhuUPuCSmLQ1QPH98vpnQxMUQM2/gfAkUEWsupVpd9JGw== + +d3-contour@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-3.0.1.tgz#2c64255d43059599cd0dba8fe4cc3d51ccdd9bbd" + integrity sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ== + dependencies: + d3-array "2 - 3" + +d3-delaunay@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.2.tgz#7fd3717ad0eade2fc9939f4260acfb503f984e92" + integrity sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ== + dependencies: + delaunator "5" + +"d3-dispatch@1 - 3", d3-dispatch@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-3.0.1.tgz#5fc75284e9c2375c36c839411a0cf550cbfc4d5e" + integrity sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg== + +"d3-drag@2 - 3", d3-drag@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-3.0.0.tgz#994aae9cd23c719f53b5e10e3a0a6108c69607ba" + integrity sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg== + dependencies: + d3-dispatch "1 - 3" + d3-selection "3" + +"d3-dsv@1 - 3", d3-dsv@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73" + integrity sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q== + dependencies: + commander "7" + iconv-lite "0.6" + rw "1" + +"d3-ease@1 - 3", d3-ease@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4" + integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== + +d3-fetch@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-3.0.1.tgz#83141bff9856a0edb5e38de89cdcfe63d0a60a22" + integrity sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw== + dependencies: + d3-dsv "1 - 3" + +d3-force@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-3.0.0.tgz#3e2ba1a61e70888fe3d9194e30d6d14eece155c4" + integrity sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg== + dependencies: + d3-dispatch "1 - 3" + d3-quadtree "1 - 3" + d3-timer "1 - 3" + +"d3-format@1 - 3", d3-format@3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + +d3-geo@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-3.0.1.tgz#4f92362fd8685d93e3b1fae0fd97dc8980b1ed7e" + integrity sha512-Wt23xBych5tSy9IYAM1FR2rWIBFWa52B/oF/GYe5zbdHrg08FU8+BuI6X4PvTwPDdqdAdq04fuWJpELtsaEjeA== + dependencies: + d3-array "2.5.0 - 3" + +d3-hierarchy@3: + version "3.1.1" + resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-3.1.1.tgz#9cbb0ffd2375137a351e6cfeed344a06d4ff4597" + integrity sha512-LtAIu54UctRmhGKllleflmHalttH3zkfSi4NlKrTAoFKjC+AFBJohsCAdgCBYQwH0F8hIOGY89X1pPqAchlMkA== + +"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + 
integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== + dependencies: + d3-color "1 - 3" + +"d3-path@1 - 3", d3-path@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-3.0.1.tgz#f09dec0aaffd770b7995f1a399152bf93052321e" + integrity sha512-gq6gZom9AFZby0YLduxT1qmrp4xpBA1YZr19OI717WIdKE2OM5ETq5qrHLb301IgxhLwcuxvGZVLeeWc/k1I6w== + +d3-polygon@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-3.0.1.tgz#0b45d3dd1c48a29c8e057e6135693ec80bf16398" + integrity sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg== + +"d3-quadtree@1 - 3", d3-quadtree@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-3.0.1.tgz#6dca3e8be2b393c9a9d514dabbd80a92deef1a4f" + integrity sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw== + +d3-random@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-3.0.1.tgz#d4926378d333d9c0bfd1e6fa0194d30aebaa20f4" + integrity sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ== + +d3-scale-chromatic@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#15b4ceb8ca2bb0dcb6d1a641ee03d59c3b62376a" + integrity sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g== + dependencies: + d3-color "1 - 3" + d3-interpolate "1 - 3" + +d3-scale@4: + version "4.0.2" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== + dependencies: + d3-array "2.10.0 - 3" + d3-format "1 - 3" + d3-interpolate "1.2.0 - 3" + d3-time "2.1.1 - 3" + d3-time-format "2 - 4" + +"d3-selection@2 - 3", d3-selection@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-3.0.0.tgz#c25338207efa72cc5b9bd1458a1a41901f1e1b31" + integrity sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ== + +d3-shape@3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.1.0.tgz#c8a495652d83ea6f524e482fca57aa3f8bc32556" + integrity sha512-tGDh1Muf8kWjEDT/LswZJ8WF85yDZLvVJpYU9Nq+8+yW1Z5enxrmXOhTArlkaElU+CTn0OTVNli+/i+HP45QEQ== + dependencies: + d3-path "1 - 3" + +"d3-time-format@2 - 4", d3-time-format@4: + version "4.1.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== + dependencies: + d3-time "1 - 3" + +"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.0.0.tgz#65972cb98ae2d4954ef5c932e8704061335d4975" + integrity sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ== + dependencies: + d3-array "2 - 3" + +"d3-timer@1 - 3", d3-timer@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" + integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== + +"d3-transition@2 - 3", d3-transition@3: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/d3-transition/-/d3-transition-3.0.1.tgz#6869fdde1448868077fdd5989200cb61b2a1645f" + integrity sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w== + dependencies: + d3-color "1 - 3" + d3-dispatch "1 - 3" + d3-ease "1 - 3" + d3-interpolate "1 - 3" + d3-timer "1 - 3" + +d3-zoom@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-3.0.0.tgz#d13f4165c73217ffeaa54295cd6969b3e7aee8f3" + integrity sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw== + dependencies: + d3-dispatch "1 - 3" + d3-drag "2 - 3" + d3-interpolate "1 - 3" + d3-selection "2 - 3" + d3-transition "2 - 3" + +d3@^7.3.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/d3/-/d3-7.3.0.tgz#f3d5a22c1f658952a6491cf50132f5267ed7a40a" + integrity sha512-MDRLJCMK232OJQRqGljQ/gCxtB8k3/sLKFjftMjzPB3nKVUODpdW9Rb3vcq7U8Ka5YKoZkAmp++Ur6I+6iNWIw== + dependencies: + d3-array "3" + d3-axis "3" + d3-brush "3" + d3-chord "3" + d3-color "3" + d3-contour "3" + d3-delaunay "6" + d3-dispatch "3" + d3-drag "3" + d3-dsv "3" + d3-ease "3" + d3-fetch "3" + d3-force "3" + d3-format "3" + d3-geo "3" + d3-hierarchy "3" + d3-interpolate "3" + d3-path "3" + d3-polygon "3" + d3-quadtree "3" + d3-random "3" + d3-scale "4" + d3-scale-chromatic "3" + d3-selection "3" + d3-shape "3" + d3-time "3" + d3-time-format "4" + d3-timer "3" + d3-transition "3" + d3-zoom "3" + +damerau-levenshtein@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.6.0, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2: + version "4.3.3" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.3.tgz#04266e0b70a98d4462e6e288e38259213332b664" + integrity sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q== + dependencies: + ms "2.1.2" + +debug@^3.1.1, debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.3.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" + integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +dedent@^0.7.0: + version "0.7.0" + 
resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + +deep-equal@^1.0.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" + integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== + dependencies: + is-arguments "^1.0.4" + is-date-object "^1.0.1" + is-regex "^1.0.4" + object-is "^1.0.1" + object-keys "^1.1.1" + regexp.prototype.flags "^1.2.0" + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +defaults@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" + integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= + dependencies: + clone "^1.0.2" + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +defined@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= + +del@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/del/-/del-6.0.0.tgz#0b40d0332cea743f1614f818be4feb717714c952" + integrity sha512-1shh9DQ23L16oXSZKB2JxpL7iMy2E0S9d517ptA1P8iw0alkPtQcrKH7ru31rYtKwF499HkTu+DRzq3TCKDFRQ== + dependencies: + globby "^11.0.1" + graceful-fs "^4.2.4" + is-glob "^4.0.1" + is-path-cwd "^2.2.0" + is-path-inside "^3.0.2" + p-map "^4.0.0" + rimraf "^3.0.2" + slash "^3.0.0" + +delaunator@5: + version "5.0.0" + resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-5.0.0.tgz#60f052b28bd91c9b4566850ebf7756efe821d81b" + integrity sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw== + dependencies: + robust-predicates "^3.0.0" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity 
sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + +destroy@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node-es@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493" + integrity sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ== + +detect-node@^2.0.4, detect-node@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detective@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/detective/-/detective-5.2.0.tgz#feb2a77e85b904ecdea459ad897cc90a99bd2a7b" + integrity sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg== + dependencies: + acorn-node "^1.6.1" + defined "^1.0.0" + minimist "^1.1.1" + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff-sequences@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.4.0.tgz#d783920ad8d06ec718a060d00196dfef25b132a5" + integrity sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= + +dns-packet@^1.3.1: + version "1.3.4" + resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.4.tgz#e3455065824a2507ba886c55a89963bb107dec6f" + integrity sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA== + dependencies: + ip "^1.1.0" + safe-buffer "^5.0.1" + +dns-txt@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6" + integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY= + dependencies: 
+ buffer-indexof "^1.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.11" + resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.11.tgz#79d5846c4f90eba3e617d9031e921de9324f84ed" + integrity sha512-7X6GvzjYf4yTdRKuCVScV+aA9Fvh5r8WzWrXBH9w82ZWB/eYDMGCnazoC/YAqAzUJWHzLOnZqr46K3iEyUhUvw== + +dom-converter@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.3.2.tgz#6206437d32ceefaec7161803230c7a20bc1b4d91" + integrity sha512-5c54Bk5Dw4qAxNOI1pFEizPSjVsx5+bpJKmL2kPn8JhBUq2q09tTCa3mjijun2NfK78NMouDYNMBkOrPZiS+ig== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.2.0.tgz#9a0b6c2782ed6a1c7323d42267183df9bd8b1d57" + integrity sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A== + +domexception@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.0.tgz#16c658c626cf966967e306f966b431f77d4a5626" + integrity sha512-fC0aXNQXqKSFTr2wDNZDhsEYjCiYsDWl3D01kwt25hm1YIPyDGHvvi3rw+PLqHAl/m71MaiF7d5zvBr0p5UB2g== + dependencies: + domelementtype "^2.2.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + 
resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + +duplexer@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + +ejs@^3.1.6: + version "3.1.6" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.6.tgz#5bfd0a0689743bb5268b3550cceeebbc1702822a" + integrity sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw== + dependencies: + jake "^10.6.1" + +electron-to-chromium@^1.4.17: + version "1.4.57" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.57.tgz#2b2766df76ac8dbc0a1d41249bc5684a31849892" + integrity sha512-FNC+P5K1n6pF+M0zIK+gFCoXcJhhzDViL3DRIGy2Fv5PohuSES1JHR7T+GlwxSxlzx4yYbsuzCZvHxcBSRCIOw== + +emittery@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +emoticon@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/emoticon/-/emoticon-3.2.0.tgz#c008ca7d7620fac742fe1bf4af8ff8fed154ae7f" + integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg== + +encodeurl@~1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + +enhanced-resolve@^5.8.3: + version "5.8.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz#6d552d465cce0423f5b3d718511ea53826a7b2f0" + integrity sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +error-stack-parser@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.6.tgz#5a99a707bd7a4c58a797902d48d82803ede6aad8" + integrity sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ== + dependencies: + stackframe "^1.1.1" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1: + version "1.19.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.19.1.tgz#d4885796876916959de78edaa0df456627115ec3" + integrity sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-symbols "^1.0.2" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.1" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.1" + is-string "^1.0.7" + is-weakref "^1.0.1" + object-inspect "^1.11.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + string.prototype.trimend "^1.0.4" + string.prototype.trimstart "^1.0.4" + unbox-primitive "^1.0.1" + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity 
sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-react-app@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.0.tgz#0fa96d5ec1dfb99c029b1554362ab3fa1c3757df" + integrity sha512-xyymoxtIt1EOsSaGag+/jmcywRuieQoA2JbPCjnw9HukFj9/97aGPoZVFioaotzk1K5Qt9sHO5EutZbkrAXS0g== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.2: + version "2.7.3" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" + integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== + dependencies: + debug "^3.2.7" + find-up "^2.1.0" + +eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== + dependencies: + lodash "^4.17.21" + string-natural-compare "^3.0.1" + +eslint-plugin-import@^2.25.3: + version "2.25.4" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.25.4.tgz#322f3f916a4e9e991ac7af32032c25ce313209f1" + integrity sha512-/KJBASVFxpu0xg1kIBn9AUa8hQVnszpwgE7Ld0lKAlx7Ie87yzEzCgSkekt+le/YVhiaosO4Y14GDAOc41nfxA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.2" + has "^1.0.3" + is-core-module "^2.8.0" + is-glob "^4.0.3" + minimatch "^3.0.4" + object.values "^1.1.5" + resolve "^1.20.0" + tsconfig-paths 
"^3.12.0" + +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8" + integrity sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g== + dependencies: + "@babel/runtime" "^7.16.3" + aria-query "^4.2.2" + array-includes "^3.1.4" + ast-types-flow "^0.0.7" + axe-core "^4.3.5" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.7" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.2.1" + language-tags "^1.0.5" + minimatch "^3.0.4" + +eslint-plugin-react-hooks@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.3.0.tgz#318dbf312e06fab1c835a4abef00121751ac1172" + integrity sha512-XslZy0LnMn+84NEG9jSGR6eGqaZB3133L8xewQo3fQagbQuGt7a63gf+P1NGKZavEYEC3UXaWEAA/AqDkuN6xA== + +eslint-plugin-react@^7.27.1: + version "7.28.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.28.0.tgz#8f3ff450677571a659ce76efc6d80b6a525adbdf" + integrity sha512-IOlFIRHzWfEQQKcAD4iyYDndHwTQiCMcJVJjxempf203jnNLUnW34AXLrV33+nEXoifJE2ZEGmcjKPL8957eSw== + dependencies: + array-includes "^3.1.4" + array.prototype.flatmap "^1.2.5" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.0.4" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.0" + object.values "^1.1.5" + prop-types "^15.7.2" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.6" + +eslint-plugin-testing-library@^5.0.1: + version "5.0.4" + resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.0.4.tgz#1f18b6e7d51db8452203bcbc909efbb571e964b8" + integrity sha512-zA/NfAENCsJXujvwwiap5gsqLp2U6X7m2XA5nOksl4zzb6GpUmRNAleCll58rEP0brFVj7DZBprlIlMGIhoC7Q== + dependencies: + "@typescript-eslint/experimental-utils" "^5.9.0" + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.0.tgz#c1f6ea30ac583031f203d65c73e723b01298f153" + integrity sha512-aWwkhnS0qAXqNOgKOK0dJ2nvzEbhEvpy8OlJ9kZ0FeZnA6zpjv1/Vei+puGFFX7zkPCkHHXb7IDX3A+7yPrRWg== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.0.0, eslint-visitor-keys@^3.1.0, eslint-visitor-keys@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.2.0.tgz#6fbb166a6798ee5991358bc2daa1ba76cc1254a1" + integrity sha512-IOzT0X126zn7ALX0dwFiUQEdsfzrm4+ISsQS8nukaJXwEyYKRSnEIIDULYg1mCtGp7UUXgfGl7BIolXREQK+XQ== + +eslint-webpack-plugin@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.1.1.tgz#83dad2395e5f572d6f4d919eedaa9cf902890fcb" + integrity sha512-xSucskTN9tOkfW7so4EaiFIkulWLXwCB/15H917lR6pTv0Zot6/fetFucmENRb7J5whVSFKIvwnrnsa78SG2yg== + dependencies: + "@types/eslint" "^7.28.2" + jest-worker "^27.3.1" + micromatch "^4.0.4" + normalize-path "^3.0.0" + schema-utils "^3.1.1" + +eslint@^8.3.0: + version "8.8.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.8.0.tgz#9762b49abad0cb4952539ffdb0a046392e571a2d" + integrity sha512-H3KXAzQGBH1plhYS3okDix2ZthuYJlQQEGE5k0IKuEqUSiyu4AmxxlJ2MtTYeJ3xB4jDhcYCwGOg2TXYdnDXlQ== + dependencies: + "@eslint/eslintrc" "^1.0.5" + "@humanwhocodes/config-array" "^0.9.2" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.0" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.2.0" + espree "^9.3.0" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^6.0.1" + globals "^13.6.0" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^9.2.0, espree@^9.3.0: + version "9.3.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.0.tgz#c1240d79183b72aaee6ccfa5a90bc9111df085a8" + integrity sha512-d/5nCsb0JcqsSEeQzFZ8DH1RmxPcglRWh24EFTlUEmCKoehXGdpsx0RkHDubqUI8LSAIKMQp4r9SzQ3n+sm4HQ== + dependencies: + acorn "^8.7.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^3.1.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity 
sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +estree-walker@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.2.1.tgz#bdafe8095383d8414d5dc2ecf4c9173b6db9412e" + integrity sha1-va/oCVOD2EFNXcLs9MkXO225QS4= + +estree-walker@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +estree-walker@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0, events@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= + +expect@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/expect/-/expect-27.4.6.tgz#f335e128b0335b6ceb4fcab67ece7cbd14c942e6" + integrity sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag== + dependencies: + "@jest/types" "^27.4.2" + jest-get-type "^27.4.0" + jest-matcher-utils "^27.4.6" + jest-message-util "^27.4.6" + +express@^4.17.1: + version "4.17.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.17.2.tgz#c18369f265297319beed4e5558753cc8c1364cb3" + integrity sha512-oxlxJxcQlYwqPWKVJJtvQiwHgosH/LrLSPA+H4UxpyvSS6jC5aH+5MoHFM+KABgTOt0APue4w66Ha8jCUo9QGg== + dependencies: + accepts "~1.3.7" + array-flatten "1.1.1" + body-parser "1.19.1" + content-disposition 
"0.5.4" + content-type "~1.0.4" + cookie "0.4.1" + cookie-signature "1.0.6" + debug "2.6.9" + depd "~1.1.2" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "~1.1.2" + fresh "0.5.2" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "~2.3.0" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.9.6" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.17.2" + serve-static "1.14.2" + setprototypeof "1.2.0" + statuses "~1.5.0" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.0.3, fast-glob@^3.2.11, fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + dependencies: + bser "2.1.1" + +figures@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + 
integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +file-selector@^0.2.2: + version "0.2.4" + resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.2.4.tgz#7b98286f9dbb9925f420130ea5ed0a69238d4d80" + integrity sha512-ZDsQNbrv6qRi1YTDOEWzf5J2KjZ9KMI1Q2SGeTkCJmNNW25Jg4TW4UMcmoqcg4WrAyKRcpBXdbWRxkfrOzVRbA== + dependencies: + tslib "^2.0.3" + +filelist@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.2.tgz#80202f21462d4d1c2e214119b1807c1bc0380e5b" + integrity sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ== + dependencies: + minimatch "^3.0.4" + +filesize@^8.0.6: + version "8.0.7" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +filter-obj@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" + integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= + +finalhandler@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" + integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "~2.3.0" + parseurl "~1.3.3" + statuses "~1.5.0" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved 
"https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.5" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" + integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== + +focus-lock@^0.10.1: + version "0.10.1" + resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.10.1.tgz#5f46fa74fefb87144479c2f8e276f0eedd8081b2" + integrity sha512-b9yUklCi4fTu2GXn7dnaVf4hiLVVBp7xTiZarAHMODV2To6Bitf6F/UI67RmKbdgJQeVwI1UO0d9HYNbXt3GkA== + dependencies: + tslib "^2.0.3" + +follow-redirects@^1.0.0: + version "1.14.7" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.7.tgz#2004c02eb9436eee9a21446a6477debf17e81685" + integrity sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.0" + resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.0.tgz#0282b335fa495a97e167f69018f566ea7d2a2b5e" + integrity sha512-cS178Y+xxtIjEUorcHddKS7yCMlrDPV31mt47blKKRfMd70Kxu5xruAFE2o9sDY6wVC5deuob/u/alD04YYHnw== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.1.2.tgz#13e420a92422b6cf244dff8690ed89401029fbe8" + integrity sha512-o2RiJQ6DZaR/5+Si0qJUIy637QMRudSi9kU/FFzx9EZazrIdnBgpU+3sEWCxAVhH2RtxW2Oz+T4p2o8uOPVcgA== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + +fs-extra@^10.0.0: + version "10.0.0" + resolved 
"https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.0.0.tgz#9ff61b655dde53fb34a82df84bb214ce802e17c1" + integrity sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" + integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-nonce@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3" + integrity sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q== + +get-own-enumerable-property-symbols@^3.0.0: + 
version "3.0.2" + resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.12.0" + resolved 
"https://registry.yarnpkg.com/globals/-/globals-13.12.0.tgz#4d733760304230a0082ed96e21e5c565f898089e" + integrity sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg== + dependencies: + type-fest "^0.20.2" + +globby@10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/globby/-/globby-10.0.1.tgz#4782c34cb75dd683351335c5829cc3420e606b22" + integrity sha512-sSs4inE1FB2YQiymcmTv6NWENryABjUNPeWhOvmn4SjtKybglsyPZxFB3U1/+L1bYi0rNZDqCLlHyLYDl1Pq5A== + dependencies: + "@types/glob" "^7.1.1" + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.0.3" + glob "^7.1.3" + ignore "^5.1.1" + merge2 "^1.2.3" + slash "^3.0.0" + +globby@^11.0.1, globby@^11.0.4: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.9" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" + integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== + +graphql@^15.5.1: + version "15.8.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.8.0.tgz#33410e96b012fa3bdb1091cc99a94769db212b38" + integrity sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw== + +gzip-size@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" + integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.1, has-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" + integrity 
sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hast-to-hyperscript@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" + integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA== + dependencies: + "@types/unist" "^2.0.3" + comma-separated-tokens "^1.0.0" + property-information "^5.3.0" + space-separated-tokens "^1.0.0" + style-to-object "^0.3.0" + unist-util-is "^4.0.0" + web-namespaces "^1.0.0" + +hast-util-from-parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz#554e34abdeea25ac76f5bd950a1f0180e0b3bc2a" + integrity sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA== + dependencies: + "@types/parse5" "^5.0.0" + hastscript "^6.0.0" + property-information "^5.0.0" + vfile "^4.0.0" + vfile-location "^3.2.0" + web-namespaces "^1.0.0" + +hast-util-is-element@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz#3b3ed5159a2707c6137b48637fbfe068e175a425" + integrity sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ== + +hast-util-parse-selector@^2.0.0: + version "2.2.5" + resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz#d57c23f4da16ae3c63b3b6ca4616683313499c3a" + integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ== + +hast-util-raw@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-6.1.0.tgz#e16a3c2642f65cc7c480c165400a40d604ab75d0" + integrity sha512-5FoZLDHBpka20OlZZ4I/+RBw5piVQ8iI1doEvffQhx5CbCyTtP8UCq8Tw6NmTAMtXgsQxmhW7Ly8OdFre5/YMQ== + dependencies: + "@types/hast" "^2.0.0" + hast-util-from-parse5 "^6.0.0" + hast-util-to-parse5 "^6.0.0" + html-void-elements "^1.0.0" + parse5 "^6.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + vfile "^4.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-to-html@^7.1.1: + version "7.1.3" + resolved "https://registry.yarnpkg.com/hast-util-to-html/-/hast-util-to-html-7.1.3.tgz#9f339ca9bea71246e565fc79ff7dbfe98bb50f5e" + integrity sha512-yk2+1p3EJTEE9ZEUkgHsUSVhIpCsL/bvT8E5GzmWc+N1Po5gBw+0F8bo7dpxXR0nu0bQVxVZGX2lBGF21CmeDw== + dependencies: + ccount "^1.0.0" + comma-separated-tokens "^1.0.0" + hast-util-is-element "^1.0.0" + hast-util-whitespace "^1.0.0" + html-void-elements "^1.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + stringify-entities "^3.0.1" + unist-util-is "^4.0.0" + xtend "^4.0.0" + +hast-util-to-parse5@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz#1ec44650b631d72952066cea9b1445df699f8479" + integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ== + dependencies: + hast-to-hyperscript "^9.0.0" + property-information "^5.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-whitespace@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz#e4fe77c4a9ae1cb2e6c25e02df0043d0164f6e41" + integrity sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A== + +hastscript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-6.0.0.tgz#e8768d7eac56c3fdeac8a92830d58e811e5bf640" + integrity sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w== + dependencies: + "@types/hast" "^2.0.0" + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + +he@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +headers-utils@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/headers-utils/-/headers-utils-3.0.2.tgz#dfc65feae4b0e34357308aefbcafa99c895e59ef" + integrity sha512-xAxZkM1dRyGV2Ou5bzMxBPNLoRCjcX+ya7KSWybQD2KwLphxsapUVK6x/02o7f4VU6GPSXch9vNY2+gkU8tYWQ== + +history@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/history/-/history-5.2.0.tgz#7cdd31cf9bac3c5d31f09c231c9928fad0007b7c" + integrity sha512-uPSF6lAJb3nSePJ43hN3eKj1dTWpN9gMod0ZssbFTIsen+WehTmEadgL+kg78xLJFdRfrrC//SavDzmRVdE+Ig== + dependencies: + "@babel/runtime" "^7.7.6" + +hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +hoopy@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + +hpack.js@^2.1.6: + version "2.1.6" + resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.2.tgz#760b404685cb1d794e4f4b744332e3b00dcfe488" + integrity sha512-c3Ab/url5ksaT0WyleslpBEthOzWhrjQbg75y7XUsfSzi3Dgzt0l8w5e7DylRn15MTlMMD58dTfzddNS2kcAjQ== + +html-escaper@^2.0.0: + version "2.0.2" + 
resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.10.0" + +html-void-elements@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" + integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= + +http-errors@1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c" + integrity sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g== + dependencies: + depd "~1.1.2" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.5" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.5.tgz#d7c30d5d3c90d865b4a2e870181f9d6f22ac7ac5" + integrity sha512-x+JVEkO2PoM8qqpbPbOL3cqHPwerep7OwzK7Ay+sMQjKzaKCqWvjoXm5tqMP9tXWWTnTzAjIhXg+J99XYuPhPA== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.2.tgz#94d7593790aad6b3de48164f13792262f656c332" + integrity 
sha512-XtmDN5w+vdFTBZaYhdJAbMqn0DP/EhkUaAeo963mojwpKMMbw6nivtFKw07D7DDOH745L5k0VL0P8KRYNEVF/g== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24, iconv-lite@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@0.6, iconv-lite@^0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +idb@^6.1.4: + version "6.1.5" + resolved "https://registry.yarnpkg.com/idb/-/idb-6.1.5.tgz#dbc53e7adf1ac7c59f9b2bf56e00b4ea4fce8c7b" + integrity sha512-IJtugpKkiVXQn5Y+LteyBCNk1N8xpGV3wWZk9EVtZWH8DYkjBn0bX1XnGP9RkyZF0sAcywa6unHqSWKe7q4LGw== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ= + dependencies: + harmony-reflect "^1.4.6" + +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.1.1, ignore@^5.1.8, ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immer@^9.0.7: + version "9.0.12" + resolved 
"https://registry.yarnpkg.com/immer/-/immer-9.0.12.tgz#2d33ddf3ee1d247deab9d707ca472c8c942a0f20" + integrity sha512-lk7UNmSbAukB5B6dh9fnh5D0bJTOFKxVg2cyJWTYrWRfhLrLMBquONcUs3aFq507hNoIZEDDh8lb8UtOizSMhA== + +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +ini@^1.3.5: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +inline-style-parser@0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" + integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== + +inquirer@^8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.0.tgz#f44f008dd344bbfc4b30031f45d984e034a3ac3a" + integrity sha512-0crLweprevJ02tTuA6ThpoAERAGyVILC4sS74uib58Xf/zSr1/ZWtmm7D5CI+bSQEaA04f0K7idaHpQbSWgiVQ== + dependencies: + ansi-escapes "^4.2.1" + chalk "^4.1.1" + cli-cursor "^3.1.0" + cli-width "^3.0.0" + external-editor "^3.0.3" + figures "^3.0.0" + lodash "^4.17.21" + mute-stream "0.0.8" + ora "^5.4.1" + run-async "^2.4.0" + rxjs "^7.2.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + through "^2.3.6" + +inter-ui@^3.19.3: + version "3.19.3" + resolved "https://registry.yarnpkg.com/inter-ui/-/inter-ui-3.19.3.tgz#cf4b4b6d30de8d5463e2462588654b325206488c" + integrity sha512-5FG9fjuYOXocIfjzcCBhICL5cpvwEetseL3FU6tP3d6Bn7g8wODhB+I9RNGRTizCT7CUG4GOK54OPxqq3msQgg== + 
+internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + +invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +ip@^1.1.0: + version "1.1.5" + resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" + integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-alphabetical@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" + integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== + +is-alphanumerical@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz#7eb9a2431f855f6b1ef1a78e326df515696c4dbf" + integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== + dependencies: + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + +is-arguments@^1.0.4: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-buffer@^2.0.0: + version "2.0.5" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-callable@^1.1.4, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-core-module@^2.2.0, is-core-module@^2.8.0, is-core-module@^2.8.1: + version "2.8.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.1.tgz#f59fdfca701d5879d0a6b100a40aa1560ce27211" + integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-decimal@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" + integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-hexadecimal@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz#cc35c97588da4bd49a8eedd6bc4082d44dcb23a7" + integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== + +is-interactive@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= + +is-negative-zero@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-node-process@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-node-process/-/is-node-process-1.0.1.tgz#4fc7ac3a91e8aac58175fe0578abbc56f2831b23" + integrity sha512-5IcdXuf++TTNt3oGl9EBdkvndXA8gmc4bz/Y+mdEpWh3Mcn/+kOw6hI7LD5CocqJWMzeb0I0ClndRVNdEPuJXQ== + +is-number-object@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.6.tgz#6a7aaf838c7f0686a50b4553f7e54a96494e89f0" + integrity sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= + +is-path-cwd@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" + integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== + +is-path-inside@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-plain-obj@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-3.0.1.tgz#662d92d24c0aa4302407b0d45d21f2251c85f85b" + integrity sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g== + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-reference@^1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/is-reference/-/is-reference-1.2.1.tgz#8b2dac0b371f4bc994fdeaba9eb542d03002d0b7" + integrity sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ== + dependencies: + "@types/estree" "*" + +is-regex@^1.0.4, is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha1-/S2INUXEa6xaYz57mgnof6LLUGk= + +is-root@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz#97b0c85fbdacb59c9c446fe653b82cf2b5b7cfe6" + integrity sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + +is-weakref@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-whitespace-character@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7" + integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== + +is-word-character@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230" + 
integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== + +is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz#7b49198b657b27a730b8e9cb601f1e1bff24c59a" + integrity sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.3.tgz#4bcae3103b94518117930d51283690960b50d3c2" + integrity sha512-x9LtDVtfm/t1GFiLl3NffC7hz+I1ragvgX1P/Lg1NlIagifZDKUkuuaAxH/qpwj2IuEfD8G2Bs/UKp+sZ/pKkg== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jake@^10.6.1: + version "10.8.2" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.2.tgz#ebc9de8558160a66d82d0eadc6a2e58fbc500a7b" + integrity sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A== + dependencies: + async "0.9.x" + chalk "^2.4.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^27.4.2: + version "27.4.2" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.4.2.tgz#da2547ea47c6e6a5f6ed336151bd2075736eb4a5" + integrity sha512-/9x8MjekuzUQoPjDHbBiXbNEBauhrPU2ct7m8TfCg69ywt1y/N+yYwGh3gCpnqUS3klYWDU/lSNgv+JhoD2k1A== + dependencies: + "@jest/types" "^27.4.2" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.4.6: + version "27.4.6" + resolved 
"https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.4.6.tgz#d3af34c0eb742a967b1919fbb351430727bcea6c" + integrity sha512-UA7AI5HZrW4wRM72Ro80uRR2Fg+7nR0GESbSI/2M+ambbzVuA63mn5T1p3Z/wlhntzGpIG1xx78GP2YIkf6PhQ== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/test-result" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.4.6" + is-generator-fn "^2.0.0" + jest-each "^27.4.6" + jest-matcher-utils "^27.4.6" + jest-message-util "^27.4.6" + jest-runtime "^27.4.6" + jest-snapshot "^27.4.6" + jest-util "^27.4.2" + pretty-format "^27.4.6" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.4.7: + version "27.4.7" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.4.7.tgz#d00e759e55d77b3bcfea0715f527c394ca314e5a" + integrity sha512-zREYhvjjqe1KsGV15mdnxjThKNDgza1fhDT+iUsXWLCq3sxe9w5xnvyctcYVT5PcdLSjv7Y5dCwTS3FCF1tiuw== + dependencies: + "@jest/core" "^27.4.7" + "@jest/test-result" "^27.4.6" + "@jest/types" "^27.4.2" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + import-local "^3.0.2" + jest-config "^27.4.7" + jest-util "^27.4.2" + jest-validate "^27.4.6" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.4.7: + version "27.4.7" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.4.7.tgz#4f084b2acbd172c8b43aa4cdffe75d89378d3972" + integrity sha512-xz/o/KJJEedHMrIY9v2ParIoYSrSVY6IVeE4z5Z3i101GoA5XgfbJz+1C8EYPsv7u7f39dS8F9v46BHDhn0vlw== + dependencies: + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.4.6" + "@jest/types" "^27.4.2" + babel-jest "^27.4.6" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.4" + jest-circus "^27.4.6" + jest-environment-jsdom "^27.4.6" + jest-environment-node "^27.4.6" + jest-get-type "^27.4.0" + jest-jasmine2 "^27.4.6" + jest-regex-util "^27.4.0" + jest-resolve "^27.4.6" + jest-runner "^27.4.6" + jest-util "^27.4.2" + jest-validate "^27.4.6" + micromatch "^4.0.4" + pretty-format "^27.4.6" + slash "^3.0.0" + +jest-diff@^27.0.0, jest-diff@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.4.6.tgz#93815774d2012a2cbb6cf23f84d48c7a2618f98d" + integrity sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.4.0" + jest-get-type "^27.4.0" + pretty-format "^27.4.6" + +jest-docblock@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.4.0.tgz#06c78035ca93cbbb84faf8fce64deae79a59f69f" + integrity sha512-7TBazUdCKGV7svZ+gh7C8esAnweJoG+SvcF6Cjqj4l17zA2q1cMwx2JObSioubk317H+cjcHgP+7fTs60paulg== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.4.6.tgz#e7e8561be61d8cc6dbf04296688747ab186c40ff" + integrity sha512-n6QDq8y2Hsmn22tRkgAk+z6MCX7MeVlAzxmZDshfS2jLcaBlyhpF3tZSJLR+kXmh23GEvS0ojMR8i6ZeRvpQcA== + dependencies: + "@jest/types" "^27.4.2" + chalk "^4.0.0" + jest-get-type "^27.4.0" + jest-util "^27.4.2" + pretty-format "^27.4.6" + +jest-environment-jsdom@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.4.6.tgz#c23a394eb445b33621dfae9c09e4c8021dea7b36" + integrity sha512-o3dx5p/kHPbUlRvSNjypEcEtgs6LmvESMzgRFQE6c+Prwl2JLA4RZ7qAnxc5VM8kutsGRTB15jXeeSbJsKN9iA== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/fake-timers" "^27.4.6" + 
"@jest/types" "^27.4.2" + "@types/node" "*" + jest-mock "^27.4.6" + jest-util "^27.4.2" + jsdom "^16.6.0" + +jest-environment-node@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.4.6.tgz#ee8cd4ef458a0ef09d087c8cd52ca5856df90242" + integrity sha512-yfHlZ9m+kzTKZV0hVfhVu6GuDxKAYeFHrfulmy7Jxwsq4V7+ZK7f+c0XP/tbVDMQW7E4neG2u147hFkuVz0MlQ== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/fake-timers" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + jest-mock "^27.4.6" + jest-util "^27.4.2" + +jest-get-type@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.4.0.tgz#7503d2663fffa431638337b3998d39c5e928e9b5" + integrity sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ== + +jest-haste-map@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.4.6.tgz#c60b5233a34ca0520f325b7e2cc0a0140ad0862a" + integrity sha512-0tNpgxg7BKurZeFkIOvGCkbmOHbLFf4LUQOxrQSMjvrQaQe3l6E8x6jYC1NuWkGo5WDdbr8FEzUxV2+LWNawKQ== + dependencies: + "@jest/types" "^27.4.2" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.4" + jest-regex-util "^27.4.0" + jest-serializer "^27.4.0" + jest-util "^27.4.2" + jest-worker "^27.4.6" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.4.6.tgz#109e8bc036cb455950ae28a018f983f2abe50127" + integrity sha512-uAGNXF644I/whzhsf7/qf74gqy9OuhvJ0XYp8SDecX2ooGeaPnmJMjXjKt0mqh1Rl5dtRGxJgNrHlBQIBfS5Nw== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/source-map" "^27.4.0" + "@jest/test-result" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.4.6" + is-generator-fn "^2.0.0" + jest-each "^27.4.6" + jest-matcher-utils "^27.4.6" + jest-message-util "^27.4.6" + jest-runtime "^27.4.6" + jest-snapshot "^27.4.6" + jest-util "^27.4.2" + pretty-format "^27.4.6" + throat "^6.0.1" + +jest-leak-detector@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.4.6.tgz#ed9bc3ce514b4c582637088d9faf58a33bd59bf4" + integrity sha512-kkaGixDf9R7CjHm2pOzfTxZTQQQ2gHTIWKY/JZSiYTc90bZp8kSZnUMS3uLAfwTZwc0tcMRoEX74e14LG1WapA== + dependencies: + jest-get-type "^27.4.0" + pretty-format "^27.4.6" + +jest-matcher-utils@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz#53ca7f7b58170638590e946f5363b988775509b8" + integrity sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA== + dependencies: + chalk "^4.0.0" + jest-diff "^27.4.6" + jest-get-type "^27.4.0" + pretty-format "^27.4.6" + +jest-message-util@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.4.6.tgz#9fdde41a33820ded3127465e1a5896061524da31" + integrity sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.4.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + micromatch "^4.0.4" + pretty-format "^27.4.6" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.4.6: + version "27.4.6" + resolved 
"https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.4.6.tgz#77d1ba87fbd33ccb8ef1f061697e7341b7635195" + integrity sha512-kvojdYRkst8iVSZ1EJ+vc1RRD9llueBjKzXzeCytH3dMM7zvPV/ULcfI2nr0v0VUgm3Bjt3hBCQvOeaBz+ZTHw== + dependencies: + "@jest/types" "^27.4.2" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.0.0, jest-regex-util@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.4.0.tgz#e4c45b52653128843d07ad94aec34393ea14fbca" + integrity sha512-WeCpMpNnqJYMQoOjm1nTtsgbR4XHAk1u00qDoNBQoykM280+/TmgA5Qh5giC1ecy6a5d4hbSsHzpBtu5yvlbEg== + +jest-resolve-dependencies@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.4.6.tgz#fc50ee56a67d2c2183063f6a500cc4042b5e2327" + integrity sha512-W85uJZcFXEVZ7+MZqIPCscdjuctruNGXUZ3OHSXOfXR9ITgbUKeHj+uGcies+0SsvI5GtUfTw4dY7u9qjTvQOw== + dependencies: + "@jest/types" "^27.4.2" + jest-regex-util "^27.4.0" + jest-snapshot "^27.4.6" + +jest-resolve@^27.4.2, jest-resolve@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.4.6.tgz#2ec3110655e86d5bfcfa992e404e22f96b0b5977" + integrity sha512-SFfITVApqtirbITKFAO7jOVN45UgFzcRdQanOFzjnbd+CACDoyeX7206JyU92l4cRr73+Qy/TlW51+4vHGt+zw== + dependencies: + "@jest/types" "^27.4.2" + chalk "^4.0.0" + graceful-fs "^4.2.4" + jest-haste-map "^27.4.6" + jest-pnp-resolver "^1.2.2" + jest-util "^27.4.2" + jest-validate "^27.4.6" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.4.6.tgz#1d390d276ec417e9b4d0d081783584cbc3e24773" + integrity sha512-IDeFt2SG4DzqalYBZRgbbPmpwV3X0DcntjezPBERvnhwKGWTW7C5pbbA5lVkmvgteeNfdd/23gwqv3aiilpYPg== + dependencies: + "@jest/console" "^27.4.6" + "@jest/environment" "^27.4.6" + "@jest/test-result" "^27.4.6" + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-docblock "^27.4.0" + jest-environment-jsdom "^27.4.6" + jest-environment-node "^27.4.6" + jest-haste-map "^27.4.6" + jest-leak-detector "^27.4.6" + jest-message-util "^27.4.6" + jest-resolve "^27.4.6" + jest-runtime "^27.4.6" + jest-util "^27.4.2" + jest-worker "^27.4.6" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.4.6.tgz#83ae923818e3ea04463b22f3597f017bb5a1cffa" + integrity sha512-eXYeoR/MbIpVDrjqy5d6cGCFOYBFFDeKaNWqTp0h6E74dK0zLHzASQXJpl5a2/40euBmKnprNLJ0Kh0LCndnWQ== + dependencies: + "@jest/environment" "^27.4.6" + "@jest/fake-timers" "^27.4.6" + "@jest/globals" "^27.4.6" + "@jest/source-map" "^27.4.0" + "@jest/test-result" "^27.4.6" + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.4" + jest-haste-map "^27.4.6" + jest-message-util "^27.4.6" + jest-mock "^27.4.6" + jest-regex-util "^27.4.0" + jest-resolve "^27.4.6" + jest-snapshot "^27.4.6" + jest-util "^27.4.2" + slash "^3.0.0" + strip-bom "^4.0.0" + 
+jest-serializer@^27.4.0: + version "27.4.0" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.4.0.tgz#34866586e1cae2388b7d12ffa2c7819edef5958a" + integrity sha512-RDhpcn5f1JYTX2pvJAGDcnsNTnsV9bjYPU8xcV+xPwOXnUPOQwf4ZEuiU6G9H1UztH+OapMgu/ckEVwO87PwnQ== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.4" + +jest-snapshot@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.4.6.tgz#e2a3b4fff8bdce3033f2373b2e525d8b6871f616" + integrity sha512-fafUCDLQfzuNP9IRcEqaFAMzEe7u5BF7mude51wyWv7VRex60WznZIC7DfKTgSIlJa8aFzYmXclmN328aqSDmQ== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.4.6" + graceful-fs "^4.2.4" + jest-diff "^27.4.6" + jest-get-type "^27.4.0" + jest-haste-map "^27.4.6" + jest-matcher-utils "^27.4.6" + jest-message-util "^27.4.6" + jest-util "^27.4.2" + natural-compare "^1.4.0" + pretty-format "^27.4.6" + semver "^7.3.2" + +jest-util@^27.4.2: + version "27.4.2" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.4.2.tgz#ed95b05b1adfd761e2cda47e0144c6a58e05a621" + integrity sha512-YuxxpXU6nlMan9qyLuxHaMMOzXAl5aGZWCSzben5DhLHemYQxCc4YK+4L3ZrCutT8GPQ+ui9k5D8rUJoDioMnA== + dependencies: + "@jest/types" "^27.4.2" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.4" + picomatch "^2.2.3" + +jest-validate@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.4.6.tgz#efc000acc4697b6cf4fa68c7f3f324c92d0c4f1f" + integrity sha512-872mEmCPVlBqbA5dToC57vA3yJaMRfIdpCoD3cyHWJOMx+SJwLNw0I71EkWs41oza/Er9Zno9XuTkRYCPDUJXQ== + dependencies: + "@jest/types" "^27.4.2" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.4.0" + leven "^3.1.0" + pretty-format "^27.4.6" + +jest-watch-typeahead@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.0.0.tgz#4de2ca1eb596acb1889752afbab84b74fcd99173" + integrity sha512-jxoszalAb394WElmiJTFBMzie/RDCF+W7Q29n5LzOPtcoQoHWfdUtHFkbhgf5NwWe8uMOxvKb/g7ea7CshfkTw== + dependencies: + ansi-escapes "^4.3.1" + chalk "^4.0.0" + jest-regex-util "^27.0.0" + jest-watcher "^27.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.0.0, jest-watcher@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.4.6.tgz#673679ebeffdd3f94338c24f399b85efc932272d" + integrity sha512-yKQ20OMBiCDigbD0quhQKLkBO+ObGN79MO4nT7YaCuQ5SM+dkBNWE8cZX0FjU6czwMvWw6StWbe+Wv4jJPJ+fw== + dependencies: + "@jest/test-result" "^27.4.6" + "@jest/types" "^27.4.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.4.2" + string-length "^4.0.1" + +jest-worker@^26.2.1: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest-worker@^27.0.2, jest-worker@^27.3.1, jest-worker@^27.4.1, jest-worker@^27.4.6: + version "27.4.6" + resolved 
"https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.4.6.tgz#5d2d93db419566cb680752ca0792780e71b3273e" + integrity sha512-gHWJF/6Xi5CTG5QCvROr6GcmpIqNYpDJyc8A1h/DyXqH1tD6SnRCM0d3U5msV31D2LB/U+E0M+W4oyvKV44oNw== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.4.7" + resolved "https://registry.yarnpkg.com/jest/-/jest-27.4.7.tgz#87f74b9026a1592f2da05b4d258e57505f28eca4" + integrity sha512-8heYvsx7nV/m8m24Vk26Y87g73Ba6ueUd0MWed/NXMhSZIm62U/llVbS0PJe1SHunbyXjJ/BqG1z9bFjGUIvTg== + dependencies: + "@jest/core" "^27.4.7" + import-local "^3.0.2" + jest-cli "^27.4.7" + +js-levenshtein@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" + integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== + +js-sha3@0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" + integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsdom@^16.6.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= + +json-parse-better-errors@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.2, json5@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3" + integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== + dependencies: + minimist "^1.2.5" + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + optionalDependencies: + graceful-fs "^4.1.6" + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.0.tgz#f802669a524ec4805fa7389eadbc9921d5dc8072" + integrity sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg== + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.1.tgz#720b97bfe7d901b927d87c3773637ae8ea48781b" + integrity sha512-uP5vu8xfy2F9A6LGC22KO7e2/vGTS1MhP+18f++ZNlf0Ohaxbc9nIEwHAsejlJKyzfZzU5UIhe5ItYkitcZnZA== + dependencies: + array-includes "^3.1.3" + object.assign "^4.1.2" + +kind-of@^6.0.2: + version 
"6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.4, klona@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + +language-subtag-registry@~0.3.2: + version "0.3.21" + resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" + integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== + +language-tags@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= + dependencies: + language-subtag-registry "~0.3.2" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lilconfig@^2.0.3, lilconfig@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.4.tgz#f4507d043d7058b380b6a8f5cb7bcd4b34cee082" + integrity sha512-bfTIN7lEsiooCocSISTWXkiWJkRqtL9wYtYy+8EK3Y41qh3mpwPU0ycTOgjdY9ErwXCc8QyrQp82bdL0Xkm9yA== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.2.0.tgz#d7022380d66d14c5fb1d496b89864ebcfd478384" + integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== + +loader-utils@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" + integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^1.0.1" + +loader-utils@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" + integrity 
sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" + integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + +lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= + +magic-string@^0.25.0, magic-string@^0.25.7: + version "0.25.7" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" + integrity sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA== + dependencies: + sourcemap-codec "^1.4.4" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +markdown-escapes@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535" + integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== + +match-sorter@^6.0.2: + version "6.3.1" + resolved "https://registry.yarnpkg.com/match-sorter/-/match-sorter-6.3.1.tgz#98cc37fda756093424ddf3cbc62bfe9c75b92bda" + integrity sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw== + dependencies: + "@babel/runtime" "^7.12.5" + remove-accents "0.4.2" + +mdast-util-definitions@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz#c5c1a84db799173b4dcf7643cda999e440c24db2" + integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== + dependencies: + unist-util-visit "^2.0.0" + +mdast-util-to-hast@^10.0.0, mdast-util-to-hast@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-10.2.0.tgz#61875526a017d8857b71abc9333942700b2d3604" + integrity sha512-JoPBfJ3gBnHZ18icCwHR50orC9kNH81tiR1gs01D8Q5YpV6adHNO9nKNuFBCJQ941/32PT1a63UF/DitmS3amQ== + dependencies: + "@types/mdast" "^3.0.0" + "@types/unist" "^2.0.0" + mdast-util-definitions "^4.0.0" + mdurl "^1.0.0" + unist-builder "^2.0.0" + unist-util-generated "^1.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + 
+mdn-data@2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +mdn-data@~1.1.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-1.1.4.tgz#50b5d4ffc4575276573c4eedb8780812a8419f01" + integrity sha512-FSYbp3lyKjyj3E7fMl6rYvUdX0FBXaluGqlFoYESWQlyUTq8R+wp0rkFxoYFqZlHCvsUXGjyJmLQSnXToYhOSA== + +mdurl@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" + integrity sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4= + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + +memfs@^3.1.2, memfs@^3.2.2: + version "3.4.1" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.1.tgz#b78092f466a0dce054d63d39275b24c71d3f1305" + integrity sha512-1c9VPVvW5P7I85c35zAdEr1TD5+F11IToIHIlrVIcflfnzPkJa0ZoYEoEdYDP8KgPFoSZ/opDrUsAoZWym3mtw== + dependencies: + fs-monkey "1.0.3" + +"memoize-one@>=3.1.1 <6", memoize-one@^5.1.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.2.1.tgz#8337aa3c4335581839ec01c3d594090cebe8f00e" + integrity sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q== + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.2.3, merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + +micromatch@^4.0.2, micromatch@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" + integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + dependencies: + braces "^3.0.1" + picomatch "^2.2.3" + +microseconds@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/microseconds/-/microseconds-0.2.0.tgz#233b25f50c62a65d861f978a4a4f8ec18797dc39" + integrity sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA== + +mime-db@1.51.0, "mime-db@>= 1.43.0 < 2": + version "1.51.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" + integrity 
sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24: + version "2.1.34" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24" + integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A== + dependencies: + mime-db "1.51.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-css-extract-plugin@^2.4.5: + version "2.5.3" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.5.3.tgz#c5c79f9b22ce9b4f164e9492267358dbe35376d9" + integrity sha512-YseMB8cs8U/KCaAGQoqYmfUuhhGW0a9p9XvWXrxVOkE3/IiISTLw4ALNt7JR5B2eYauFM+PQGSbXMDmVbR7Tfw== + dependencies: + schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.4, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@^0.5.5, mkdirp@~0.5.1: + version "0.5.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + dependencies: + minimist "^1.2.5" + +moment@^2.29.1: + version "2.29.2" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.2.tgz#00910c60b20843bcba52d37d58c628b47b1f20e4" + integrity sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + 
+ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +msw@^0.36.8: + version "0.36.8" + resolved "https://registry.yarnpkg.com/msw/-/msw-0.36.8.tgz#33ff8bfb0299626a95f43d0e4c3dc2c73c17f1ba" + integrity sha512-K7lOQoYqhGhTSChsmHMQbf/SDCsxh/m0uhN6Ipt206lGoe81fpTmaGD0KLh4jUxCONMOUnwCSj0jtX2CM4pEdw== + dependencies: + "@mswjs/cookies" "^0.1.7" + "@mswjs/interceptors" "^0.12.7" + "@open-draft/until" "^1.0.3" + "@types/cookie" "^0.4.1" + "@types/inquirer" "^8.1.3" + "@types/js-levenshtein" "^1.1.0" + chalk "4.1.1" + chokidar "^3.4.2" + cookie "^0.4.1" + graphql "^15.5.1" + headers-utils "^3.0.2" + inquirer "^8.2.0" + is-node-process "^1.0.1" + js-levenshtein "^1.1.6" + node-fetch "^2.6.7" + path-to-regexp "^6.2.0" + statuses "^2.0.0" + strict-event-emitter "^0.2.0" + type-fest "^1.2.2" + yargs "^17.3.0" + +multicast-dns-service-types@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901" + integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE= + +multicast-dns@^6.0.1: + version "6.2.3" + resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229" + integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g== + dependencies: + dns-packet "^1.3.1" + thunky "^1.0.2" + +mute-stream@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== + +nano-time@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/nano-time/-/nano-time-1.0.0.tgz#b0554f69ad89e22d0907f7a12b0993a5d96137ef" + integrity sha1-sFVPaa2J4i0JB/ehKwmTpdlhN+8= + dependencies: + big-integer "^1.6.16" + +nanoid@^3.1.30: + version "3.2.0" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.2.0.tgz#62667522da6673971cca916a6d3eff3f415ff80c" + integrity sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +negotiator@0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" + integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== + +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity 
sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-emoji@^1.10.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-1.11.0.tgz#69a0150e6946e2f115e9d7ea4df7971e2628301c" + integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A== + dependencies: + lodash "^4.17.21" + +node-fetch@^2.6.7: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-forge@^1.2.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.0.tgz#37a874ea723855f37db091e6c186e5b67a01d4b2" + integrity sha512-08ARB91bUi6zNKzVmaj3QO7cr397uiDT2nJ63cHjyNtCTWIgvS47j3eT0WfzUwS9+6Z5YshRaoasFkXCKrIYbA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + +node-releases@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5" + integrity sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= + +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nth-check@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.1.tgz#2efe162f5c3da06a28959fbd3db75dbeea9f0fc2" + integrity sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w== + dependencies: + boolbase "^1.0.0" + +numeral@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/numeral/-/numeral-2.0.6.tgz#4ad080936d443c2561aed9f2197efffe25f4e506" + integrity sha1-StCAk21EPCVhrtnyGX7//iX05QY= + +nwsapi@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" + integrity 
sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-hash@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-2.2.0.tgz#5ad518581eefc443bd763472b8ff2e9c2c0d54a5" + integrity sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw== + +object-inspect@^1.11.0, object-inspect@^1.9.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" + integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== + +object-is@^1.0.1: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz#b223cf38e17fefb97a63c10c91df72ccb386df9e" + integrity sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.hasown@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.0.tgz#7232ed266f34d197d15cac5880232f7a4790afe5" + integrity sha512-MhjYRfj3GBlhSkDHo6QmvgjRLXQ2zndabdf3nX0yTyZK9rPfxb6uRpAac8HXNLy1GpqWtZ81Qh4v3uOls2sRAg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.values@^1.1.0, object.values@^1.1.5: + version "1.1.5" + resolved 
"https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +oblivious-set@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/oblivious-set/-/oblivious-set-1.0.0.tgz#c8316f2c2fb6ff7b11b6158db3234c49f733c566" + integrity sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw== + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^5.1.0, onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + +os-tmpdir@~1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +outvariant@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.2.1.tgz#e630f6cdc1dbf398ed857e36f219de4a005ccd35" + integrity sha512-bcILvFkvpMXh66+Ubax/inxbKRyWTUiiFIW2DWkiS79wakrLGn3Ydy+GvukadiyfZjaL6C7YhIem4EZSM282wA== + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-retry@^4.5.0: + version "4.6.1" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.1.tgz#8fcddd5cdf7a67a0911a9cf2ef0e5df7f602316c" + integrity sha512-e2xXGNhZOZ0lfgR9kL34iGlU8N/KO0xZnQxVEwdeOvpqNDQfdnxIYizvWtK8RglUa3bGqI8g0R/BdfzLMxRkiA== + dependencies: + "@types/retry" "^0.12.0" + retry "^0.13.1" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved 
"https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-entities@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" + integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ== + dependencies: + character-entities "^1.0.0" + character-entities-legacy "^1.0.0" + character-reference-invalid "^1.0.0" + is-alphanumerical "^1.0.0" + is-decimal "^1.0.0" + is-hexadecimal "^1.0.0" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5@6.0.1, parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6, path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= + +path-to-regexp@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.0.tgz#f7b3803336104c346889adece614669230645f38" + integrity sha512-f66KywYG6+43afgE/8j/GoiNyygk/bnoCbps++3ErRKsIYkGGupyv07R2Ok5m9i67Iqc+T2g1eAUGUPzWhYTyg== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +picocolors@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +portfinder@^1.0.28: + version "1.0.28" + resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.28.tgz#67c4622852bd5374dd1dd900f779f53462fac778" + integrity sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA== + dependencies: + async "^2.6.2" + debug "^3.1.1" + mkdirp "^0.5.5" + +postcss-attribute-case-insensitive@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.0.tgz#39cbf6babf3ded1e4abf37d09d6eda21c644105c" + integrity sha512-b4g9eagFGq9T5SWX4+USfVyjIb3liPnjhHHRMP7FMB2kFVpYyfEscV0wP3eaXhKlcHKUut8lt5BGoeylWA/dBQ== + dependencies: + postcss-selector-parser "^6.0.2" + +postcss-browser-comments@^4: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== + +postcss-calc@^8.2.0: + version "8.2.3" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.3.tgz#53b95ce93de19213c2a5fdd71277a81690ef41d0" + integrity sha512-EGM2EBBWqP57N0E7N7WOLT116PJ39dwHVU01WO4XPPQLJfkL2xVgkMZ+TZvCfapj/uJH07UEfKHQNPHzSw/14Q== + dependencies: + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.0.2" + +postcss-color-functional-notation@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.1.tgz#a25e9e1855e14d04319222a689f120b3240d39e0" + integrity sha512-62OBIXCjRXpQZcFOYIXwXBlpAVWrYk8ek1rcjvMING4Q2cf0ipyN9qT+BhHA6HmftGSEnFQu2qgKO3gMscl3Rw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-hex-alpha@^8.0.2: + version "8.0.2" + resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.2.tgz#7a248b006dd47bd83063f662352d31fd982f74ec" + integrity sha512-gyx8RgqSmGVK156NAdKcsfkY3KPGHhKqvHTL3hhveFrBBToguKFzhyiuk3cljH6L4fJ0Kv+JENuPXs1Wij27Zw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-rebeccapurple@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.0.2.tgz#5d397039424a58a9ca628762eb0b88a61a66e079" + integrity sha512-SFc3MaocHaQ6k3oZaFwH8io6MdypkUtEy/eXzXEB1vEQlO3S3oDc/FSZA8AsS04Z25RirQhlDlHLh3dn7XewWw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.2.4: + version "5.2.4" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.2.4.tgz#7726d3f3d24f111d39faff50a6500688225d5324" + integrity sha512-rYlC5015aNqVQt/B6Cy156g7sH5tRUJGmT9xeagYthtKehetbKx7jHxhyLpulP4bs4vbp8u/B2rac0J7S7qPQg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.0.3.tgz#492db08a28af84d57651f10edc8f6c8fb2f6df40" + integrity sha512-fVkjHm2T0PSMqXUCIhHNWVGjhB9mHEWX2GboVs7j3iCgr6FpIl9c/IdXy0PHWZSQ9LFTRgmj98amxJE6KOnlsA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-media@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.0.tgz#1be6aff8be7dc9bf1fe014bde3b71b92bb4552f1" + integrity sha512-FvO2GzMUaTN0t1fBULDeIvxr5IvbDXcIatt6pnJghc736nqNgsGao5NT+5+WVLAQiTt6Cb3YUms0jiPaXhL//g== + +postcss-custom-properties@^12.1.2: + version "12.1.3" + resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.3.tgz#8e37651c7188e72e6762eeae8db39755e84d3a64" + integrity sha512-rtu3otIeY532PnEuuBrIIe+N+pcdbX/7JMZfrcL09wc78YayrHw5E8UkDfvnlOhEUrI4ptCuzXQfj+Or6spbGA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-selectors@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.0.tgz#022839e41fbf71c47ae6e316cb0e6213012df5ef" + integrity sha512-/1iyBhz/W8jUepjGyu7V1OPcGbc636snN1yXEQCinb6Bwt7KxsiU7/bLQlp8GwAXzCh7cobBU5odNn/2zQWR8Q== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-dir-pseudo-class@^6.0.3: + version "6.0.3" + resolved 
"https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.3.tgz#febfe305e75267913a53bf5094c7679f5cfa9b55" + integrity sha512-qiPm+CNAlgXiMf0J5IbBBEXA9l/Q5HGsNGkL3znIwT2ZFRLGY9U2fTUpa4lqCUXQOxaLimpacHeQC80BD2qbDw== + dependencies: + postcss-selector-parser "^6.0.8" + +postcss-discard-comments@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.0.2.tgz#811ed34e2b6c40713daab0beb4d7a04125927dcd" + integrity sha512-6VQ3pYTsJHEsN2Bic88Aa7J/Brn4Bv8j/rqaFQZkH+pcVkKYwxCIvoMQkykEW7fBjmofdTnQgcivt5CCBJhtrg== + +postcss-discard-duplicates@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.0.2.tgz#61076f3d256351bdaac8e20aade730fef0609f44" + integrity sha512-LKY81YjUjc78p6rbXIsnppsaFo8XzCoMZkXVILJU//sK0DgPkPSpuq/cZvHss3EtdKvWNYgWzQL+wiJFtEET4g== + +postcss-discard-empty@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.0.2.tgz#0676a9bcfc44bb00d338352a45ab80845a31d8f0" + integrity sha512-SxBsbTjlsKUvZLL+dMrdWauuNZU8TBq5IOL/DHa6jBUSXFEwmDqeXRfTIK/FQpPTa8MJMxEHjSV3UbiuyLARPQ== + +postcss-discard-overridden@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.0.3.tgz#004b9818cabb407e60616509267567150b327a3f" + integrity sha512-yRTXknIZA4k8Yo4FiF1xbsLj/VBxfXEWxJNIrtIy6HC9KQ4xJxcPtoaaskh6QptCGrrcGnhKsTsENTRPZOBu4g== + +postcss-double-position-gradients@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.0.4.tgz#2484b9785ef3ba81b0f03a279c52ec58fc5344c2" + integrity sha512-qz+s5vhKJlsHw8HjSs+HVk2QGFdRyC68KGRQGX3i+GcnUjhWhXQEmCXW6siOJkZ1giu0ddPwSO6I6JdVVVPoog== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-env-function@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.4.tgz#4e85359ca4fcdde4ec4b73752a41de818dbe91cc" + integrity sha512-0ltahRTPtXSIlEZFv7zIvdEib7HN0ZbUQxrxIKn8KbiRyhALo854I/CggU5lyZe6ZBvSTJ6Al2vkZecI2OhneQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== + +postcss-focus-visible@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.3.tgz#14635b71a6b9140f488f11f26cbc9965a13f6843" + integrity sha512-ozOsg+L1U8S+rxSHnJJiET6dNLyADcPHhEarhhtCI9DBLGOPG/2i4ddVoFch9LzrBgb8uDaaRI4nuid2OM82ZA== + dependencies: + postcss-selector-parser "^6.0.8" + +postcss-focus-within@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.3.tgz#0b0bf425f14a646bbfd973b463e2d20d85a3a841" + integrity sha512-fk9y2uFS6/Kpp7/A9Hz9Z4rlFQ8+tzgBcQCXAFSrXFGAbKx+4ZZOmmfHuYjCOMegPWoz0pnC6fNzi8j7Xyqp5Q== + dependencies: + postcss-selector-parser "^6.0.8" + +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== + 
+postcss-gap-properties@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.2.tgz#562fbf43a6a721565b3ca0e01008690991d2f726" + integrity sha512-EaMy/pbxtQnKDsnbEjdqlkCkROTQZzolcLKgIE+3b7EuJfJydH55cZeHfm+MtIezXRqhR80VKgaztO/vHq94Fw== + +postcss-image-set-function@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.4.tgz#ce91579ab2c1386d412ff5cd5e733c474b1f75ee" + integrity sha512-BlEo9gSTj66lXjRNByvkMK9dEdEGFXRfGjKRi9fo8s0/P3oEk74cAoonl/utiM50E2OPVb/XSu+lWvdW4KtE/Q== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-initial@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== + +postcss-js@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" + integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== + dependencies: + camelcase-css "^2.0.1" + +postcss-lab-function@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.0.3.tgz#633745b324afbcd5881da85fe2cef58b17487536" + integrity sha512-MH4tymWmefdZQ7uVG/4icfLjAQmH6o2NRYyVh2mKoB4RXJp9PjsyhZwhH4ouaCQHvg+qJVj3RzeAR1EQpIlXZA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-load-config@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.1.tgz#2f53a17f2f543d9e63864460af42efdac0d41f87" + integrity sha512-c/9XYboIbSEUZpiD1UQD0IKiUe8n9WHYV7YFe7X7J+ZwCsEKkUJSFWjS9hBU1RR9THR7jMXst8sxiqP0jjo2mg== + dependencies: + lilconfig "^2.0.4" + yaml "^1.10.2" + +postcss-loader@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-logical@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.3.tgz#9934e0fb16af70adbd94217b24d2f315ceb5c2f0" + integrity sha512-P5NcHWYrif0vK8rgOy/T87vg0WRIj3HSknrvp1wzDbiBeoDPVmiVRmkown2eSQdpPveat/MC1ess5uhzZFVnqQ== + +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.0.5.tgz#cbc217ca22fb5a3e6ee22a6a1aa6920ec1f3c628" + integrity sha512-R2BCPJJ/U2oh1uTWEYn9CcJ7MMcQ1iIbj9wfr2s/zHu5om5MP/ewKdaunpfJqR1WYzqCsgnXuRoVXPAzxdqy8g== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.0.2" + +postcss-merge-rules@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.0.5.tgz#2a18669ec214019884a60f0a0d356803a8138366" + integrity sha512-3Oa26/Pb9VOFVksJjFG45SNoe4nhGvJ2Uc6TlRimqF8uhfOCEhVCaJ3rvEat5UFOn2UZqTY5Da8dFgCh3Iq0Ug== + dependencies: + browserslist 
"^4.16.6" + caniuse-api "^3.0.0" + cssnano-utils "^3.0.1" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.0.3.tgz#48c455c4cd980ecd07ac9bf3fc58e9d8a2ae4168" + integrity sha512-bC45rVzEwsLhv/cL1eCjoo2OOjbSk9I7HKFBYnBvtyuIZlf7uMipMATXtA0Fc3jwPo3wuPIW1jRJWKzflMh1sA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-minify-gradients@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.0.5.tgz#a5572b9c98ed52cbd7414db24b873f8b9e418290" + integrity sha512-/YjvXs8PepsoiZAIpjstOO4IHKwFAqYNqbA1yVdqklM84tbUUneh6omJxGlRlF3mi6K5Pa067Mg6IwqEnYC8Zg== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.0.1" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.0.4.tgz#230a4d04456609e614db1d48c2eebc21f6490a45" + integrity sha512-Z0vjod9lRZEmEPfEmA2sCfjbfEEFKefMD3RDIQSUfXK4LpCyWkX1CniUgyNvnjJFLDPSxtgKzozhHhPHKoeGkg== + dependencies: + browserslist "^4.16.6" + cssnano-utils "^3.0.1" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.1.2.tgz#bc9698f713b9dab7f44f1ec30643fcbad9a043c0" + integrity sha512-gpn1nJDMCf3g32y/7kl+jsdamhiYT+/zmEt57RoT9GmzlixBNRPohI7k8UIHelLABhdLf3MSZhtM33xuH5eQOQ== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@5.0.6: + version "5.0.6" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" + integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== + dependencies: + postcss-selector-parser "^6.0.6" + +postcss-nesting@^10.1.2: + version "10.1.2" + resolved 
"https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.1.2.tgz#2e5f811b3d75602ea18a95dd445bde5297145141" + integrity sha512-dJGmgmsvpzKoVMtDMQQG/T6FSqs6kDtUDirIfl4KnjMCiY9/ETX8jdKyCd20swSRAbUYkaBKV20pxkzxoOXLqQ== + dependencies: + postcss-selector-parser "^6.0.8" + +postcss-normalize-charset@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.0.2.tgz#eb6130c8a8e950ce25f9ea512de1d9d6a6f81439" + integrity sha512-fEMhYXzO8My+gC009qDc/3bgnFP8Fv1Ic8uw4ec4YTlhIOw63tGPk1YFd7fk9bZUf1DAbkhiL/QPWs9JLqdF2g== + +postcss-normalize-display-values@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.0.2.tgz#8b5273c6c7d0a445e6ef226b8a5bb3204a55fb99" + integrity sha512-RxXoJPUR0shSjkMMzgEZDjGPrgXUVYyWA/YwQRicb48H15OClPuaDR7tYokLAlGZ2tCSENEN5WxjgxSD5m4cUw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-positions@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.0.3.tgz#b63fcc4ff5fbf65934fafaf83270b2da214711d1" + integrity sha512-U+rmhjrNBvIGYqr/1tD4wXPFFMKUbXsYXvlUCzLi0tOCUS6LoeEAnmVXXJY/MEB/1CKZZwBSs2tmzGawcygVBA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.0.3.tgz#488c0ad8aac0fa4f66ef56cc8d604b3fd9bf705f" + integrity sha512-uk1+xYx0AMbA3nLSNhbDrqbf/rx+Iuq5tVad2VNyaxxJzx79oGieJ6D9F6AfOL2GtiIbP7vTYlpYHtG+ERFXTg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.0.3.tgz#49e0a1d58a119d5435ef21893ad03136a6e8f0e6" + integrity sha512-Mf2V4JbIDboNGQhW6xW0YREDiYXoX3WrD3EjKkjvnpAJ6W4qqjLnK/c9aioyVFaWWHVdP5zVRw/9DI5S3oLDFw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.0.2.tgz#db4f4f49721f47667afd1fdc5edb032f8d9cdb2e" + integrity sha512-Ao0PP6MoYsRU1LxeVUW740ioknvdIUmfr6uAA3xWlQJ9s69/Tupy8qwhuKG3xWfl+KvLMAP9p2WXF9cwuk/7Bg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.0.3.tgz#10f0d30093598a58c48a616491cc7fa53256dd43" + integrity sha512-uNC7BmS/7h6to2UWa4RFH8sOTzu2O9dVWPE/F9Vm9GdhONiD/c1kNaCLbmsFHlKWcEx7alNUChQ+jH/QAlqsQw== + dependencies: + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.0.4.tgz#3b0322c425e31dd275174d0d5db0e466f50810fb" + integrity sha512-cNj3RzK2pgQQyNp7dzq0dqpUpQ/wYtdDZM3DepPmFjCmYIfceuD9VIAcOdvrNetjIU65g1B4uwdP/Krf6AFdXg== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.0.3.tgz#fb6bcc9ff2f834448b802657c7acd0956f4591d1" + integrity sha512-333JWRnX655fSoUbufJ10HJop3c8mrpKkCCUnEmgz/Cb/QEtW+/TMZwDAUt4lnwqP6tCCk0x0b58jqvDgiQm/A== + dependencies: + postcss-value-parser 
"^4.2.0" + +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== + dependencies: + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" + +postcss-ordered-values@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.0.4.tgz#f799dca87a7f17526d31a20085e61768d0b00534" + integrity sha512-taKtGDZtyYUMVYkg+MuJeBUiTF6cGHZmo/qcW7ibvW79UlyKuSHbo6dpCIiqI+j9oJsXWzP+ovIxoyLDOeQFdw== + dependencies: + cssnano-utils "^3.0.1" + postcss-value-parser "^4.2.0" + +postcss-overflow-shorthand@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.2.tgz#b4e9c89728cd1e4918173dfb95936b75f78d4148" + integrity sha512-odBMVt6PTX7jOE9UNvmnLrFzA9pXS44Jd5shFGGtSHY80QCuJF+14McSy0iavZggRZ9Oj//C9vOKQmexvyEJMg== + +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== + +postcss-place@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.3.tgz#ca8040dfd937c7769a233a3bd6e66e139cf89e62" + integrity sha512-tDQ3m+GYoOar+KoQgj+pwPAvGHAp/Sby6vrFiyrELrMKQJ4AejL0NcS0mm296OKKYA2SRg9ism/hlT/OLhBrdQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-preset-env@^7.0.1: + version "7.2.3" + resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.2.3.tgz#01b9b6eea0ff16c27a3d514f10105d56363428a6" + integrity sha512-Ok0DhLfwrcNGrBn8sNdy1uZqWRk/9FId0GiQ39W4ILop5GHtjJs8bu1MY9isPwHInpVEPWjb4CEcEaSbBLpfwA== + dependencies: + autoprefixer "^10.4.2" + browserslist "^4.19.1" + caniuse-lite "^1.0.30001299" + css-blank-pseudo "^3.0.2" + css-has-pseudo "^3.0.3" + css-prefers-color-scheme "^6.0.2" + cssdb "^5.0.0" + postcss-attribute-case-insensitive "^5.0.0" + postcss-color-functional-notation "^4.2.1" + postcss-color-hex-alpha "^8.0.2" + postcss-color-rebeccapurple "^7.0.2" + postcss-custom-media "^8.0.0" + postcss-custom-properties "^12.1.2" + postcss-custom-selectors "^6.0.0" + postcss-dir-pseudo-class "^6.0.3" + postcss-double-position-gradients "^3.0.4" + postcss-env-function "^4.0.4" + postcss-focus-visible "^6.0.3" + postcss-focus-within "^5.0.3" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.2" + postcss-image-set-function "^4.0.4" + postcss-initial "^4.0.1" + postcss-lab-function "^4.0.3" + postcss-logical "^5.0.3" + postcss-media-minmax "^5.0.0" + postcss-nesting "^10.1.2" + postcss-overflow-shorthand "^3.0.2" + postcss-page-break "^3.0.4" + postcss-place "^7.0.3" + postcss-pseudo-class-any-link "^7.0.2" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^5.0.0" + +postcss-pseudo-class-any-link@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.0.2.tgz#6284c2f970715c78fe992d2fac1130e9991585c9" + integrity sha512-CG35J1COUH7OOBgpw5O+0koOLUd5N4vUGKUqSAuIe4GiuLHWU96Pqp+UPC8QITTd12zYAFx76pV7qWT/0Aj/TA== + dependencies: + postcss-selector-parser "^6.0.8" + +postcss-reduce-initial@^5.0.2: + version "5.0.2" + resolved 
"https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.0.2.tgz#fa424ce8aa88a89bc0b6d0f94871b24abe94c048" + integrity sha512-v/kbAAQ+S1V5v9TJvbGkV98V2ERPdU6XvMcKMjqAlYiJ2NtsHGlKYLPjWWcXlaTKNxooId7BGxeraK8qXvzKtw== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.0.3.tgz#df60fab34698a43073e8b87938c71df7a3b040ac" + integrity sha512-yDnTUab5i7auHiNwdcL1f+pBnqQFf+7eC4cbC7D8Lc1FkvNZhtpkdad+9U4wDdFb84haupMf0rA/Zc5LcTe/3A== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== + +postcss-selector-not@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-5.0.0.tgz#ac5fc506f7565dd872f82f5314c0f81a05630dc7" + integrity sha512-/2K3A4TCP9orP4TNS7u3tGdRFVKqz/E6pX3aGnriPG0jU78of8wsUcqE4QAhWEU0d+WnMSF93Ah3F//vUtK+iQ== + dependencies: + balanced-match "^1.0.0" + +postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.8, postcss-selector-parser@^6.0.9: + version "6.0.9" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.9.tgz#ee71c3b9ff63d9cd130838876c13a2ec1a992b2f" + integrity sha512-UO3SgnZOVTwu4kyLR22UQ1xZh086RyNZppb7lLAKBFK8a32ttG5i87Y/P3+2bRSjZNyJ1B7hfFNo273tKe9YxQ== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.0.3.tgz#d945185756e5dfaae07f9edb0d3cae7ff79f9b30" + integrity sha512-41XZUA1wNDAZrQ3XgWREL/M2zSw8LJPvb5ZWivljBsUQAGoEKMYm6okHsTjJxKYI4M75RQEH4KYlEM52VwdXVA== + dependencies: + postcss-value-parser "^4.1.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.0.3.tgz#07fd116a8fbd9202e7030f7c4952e7b52c26c63d" + integrity sha512-V5tX2hadSSn+miVCluuK1IDGy+7jAXSOfRZ2DQ+s/4uQZb/orDYBjH0CHgFrXsRw78p4QTuEFA9kI6C956UnHQ== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.35: + version "7.0.39" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.2.15, postcss@^8.3.5, postcss@^8.4.4: + version "8.4.5" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.5.tgz#bae665764dfd4c6fcc24dc0fdf7e7aa00cc77f95" + integrity sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg== + dependencies: + nanoid "^3.1.30" + picocolors "^1.0.0" 
+ source-map-js "^1.0.1" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: + version "5.6.0" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-format@^27.0.0, pretty-format@^27.0.2, pretty-format@^27.4.6: + version "27.4.6" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.4.6.tgz#1b784d2f53c68db31797b2348fa39b49e31846b7" + integrity sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +prismjs@~1.25.0: + version "1.25.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.25.0.tgz#6f822df1bdad965734b310b315a23315cf999756" + integrity sha512-WCjJHl1KEWbnkQom1+SzftbtXMKQoezOCYs5rECqMN+jP+apI7ftoflyqigqzopSO3hMhTEb0mFClA8lkolgEg== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e" + integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== + dependencies: + asap "~2.0.6" + +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.5.8, prop-types@^15.6.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +property-information@^5.0.0, property-information@^5.3.0: + version "5.6.0" + resolved "https://registry.yarnpkg.com/property-information/-/property-information-5.6.0.tgz#61675545fb23002f245c6540ec46077d4da3ed69" + integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA== + dependencies: + xtend "^4.0.0" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved 
"https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +q@^1.1.2: + version "1.5.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= + +qs@6.9.6: + version "6.9.6" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.6.tgz#26ed3c8243a431b2924aca84cc90471f35d5a0ee" + integrity sha512-TIRk4aqYLNoJUbd+g2lEdz5kLWIuTMRagAXxl78Q0RiVjAOugHmeKNGdd3cwo/ktpf9aL9epCfFqWDEKysUlLQ== + +query-string@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/query-string/-/query-string-7.1.1.tgz#754620669db978625a90f635f12617c271a088e1" + integrity sha512-MplouLRDHBZSG9z7fpuAAcI7aAYjDLhtsiVZsevsfaHWDS2IDdORKbSd1kWUA+V4zyva/HZoSfpwnYMMQDhb0w== + dependencies: + decode-uri-component "^0.2.0" + filter-obj "^1.1.0" + split-on-first "^1.0.0" + strict-uri-encode "^2.0.0" + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +raf-schd@^4.0.2: + version "4.0.3" + resolved "https://registry.yarnpkg.com/raf-schd/-/raf-schd-4.0.3.tgz#5d6c34ef46f8b2a0e880a8fcdb743efc5bfdbc1a" + integrity sha512-tQkJl2GRWh83ui2DiPTJz9wEiMN20syf+5oKfB03yYP7ioZcJwsIK8FjrtLwH1m7C7e+Tt2yYBlrOpdT+dyeIQ== + +raf@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.2.tgz#baf3e9c21eebced59dd6533ac872b71f7b61cb32" + integrity sha512-RPMAFUJP19WIet/99ngh6Iv8fzAbqum4Li7AD6DtGaW2RpMB/11xDoalPiJMTbu6I3hkbMVkATvZrqb9EEqeeQ== + dependencies: + bytes "3.1.1" + http-errors "1.8.1" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== + dependencies: + core-js "^3.19.2" + object-assign "^4.1.1" + promise "^8.1.0" + raf "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" + +react-beautiful-dnd@^13.0.0: + version "13.1.0" + resolved "https://registry.yarnpkg.com/react-beautiful-dnd/-/react-beautiful-dnd-13.1.0.tgz#ec97c81093593526454b0de69852ae433783844d" + integrity sha512-aGvblPZTJowOWUNiwd6tNfEpgkX5OxmpqxHKNW/4VmvZTNTbeiq7bA3bn5T+QSF2uibXB0D1DmJsb1aC/+3cUA== + dependencies: + "@babel/runtime" "^7.9.2" + css-box-model "^1.2.0" + memoize-one "^5.1.1" + raf-schd "^4.0.2" + react-redux "^7.2.0" + redux "^4.0.4" + use-memo-one "^1.1.1" + +react-clientside-effect@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/react-clientside-effect/-/react-clientside-effect-1.2.5.tgz#e2c4dc3c9ee109f642fac4f5b6e9bf5bcd2219a3" + integrity sha512-2bL8qFW1TGBHozGGbVeyvnggRpMjibeZM2536AKNENLECutp2yfs44IL8Hmpn8qjFQ2K7A9PnYf3vc7aQq/cPA== + dependencies: + "@babel/runtime" "^7.12.13" + +react-dev-utils@^12.0.0: + version "12.0.0" + resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.0.tgz#4eab12cdb95692a077616770b5988f0adf806526" + integrity sha512-xBQkitdxozPxt1YZ9O1097EJiVpwHr9FoAuEVURCKV0Av8NBERovJauzP7bo1ThvuhZ4shsQ1AJiu4vQpoT1AQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.10" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + scheduler "^0.20.2" + +react-dropzone@^11.2.0: + version "11.5.3" + resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-11.5.3.tgz#757d4980fcae839896a894e41d3e72df04981f86" + integrity sha512-68+T6sWW5L89qJnn3SD1aRazhuRBhTT9JOI1W8vI5YWsfegM4C7tlGbPH1AgEbmZY5s8E8L0QhX0e3VdAa0KWA== + dependencies: + attr-accept "^2.2.1" + file-selector "^0.2.2" + prop-types "^15.7.2" + +react-error-overlay@^6.0.10: + version "6.0.10" + resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.10.tgz#0fe26db4fa85d9dbb8624729580e90e7159a59a6" + integrity 
sha512-mKR90fX7Pm5seCOfz8q9F+66VCc1PGsWSBxKbITjfKVQHMNF2zudxHnMdJiB1fRCb+XsbQV9sO9DCkgsMQgBIA== + +react-focus-lock@^2.6.0: + version "2.7.1" + resolved "https://registry.yarnpkg.com/react-focus-lock/-/react-focus-lock-2.7.1.tgz#a9fbb3fa4efaee32162406e5eb96ae658964193b" + integrity sha512-ImSeVmcrLKNMqzUsIdqOkXwTVltj79OPu43oT8tVun7eIckA4VdM7UmYUFo3H/UC2nRVgagMZGFnAOQEDiDYcA== + dependencies: + "@babel/runtime" "^7.0.0" + focus-lock "^0.10.1" + prop-types "^15.6.2" + react-clientside-effect "^1.2.5" + use-callback-ref "^1.2.5" + use-sidecar "^1.0.5" + +react-focus-on@^3.5.0: + version "3.5.4" + resolved "https://registry.yarnpkg.com/react-focus-on/-/react-focus-on-3.5.4.tgz#be45a9d0495f3bb6f5249704c85362df94980ecf" + integrity sha512-HnU0YGKhNSUsC4k6K8L+2wk8mC/qdg+CsS7A1bWLMgK7UuBphdECs2esnS6cLmBoVNjsFnCm/vMypeezKOdK3A== + dependencies: + aria-hidden "^1.1.3" + react-focus-lock "^2.6.0" + react-remove-scroll "^2.4.1" + react-style-singleton "^2.1.1" + tslib "^2.3.1" + use-callback-ref "^1.2.5" + use-sidecar "^1.0.5" + +react-input-autosize@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/react-input-autosize/-/react-input-autosize-2.2.2.tgz#fcaa7020568ec206bc04be36f4eb68e647c4d8c2" + integrity sha512-jQJgYCA3S0j+cuOwzuCd1OjmBmnZLdqQdiLKRYrsMMzbjUrVDS5RvJUDwJqA7sKuksDuzFtm6hZGKFu7Mjk5aw== + dependencies: + prop-types "^15.5.8" + +react-is@^16.13.1, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.1, react-is@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-is@~16.3.0: + version "16.3.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.3.2.tgz#f4d3d0e2f5fbb6ac46450641eb2e25bf05d36b22" + integrity sha512-ybEM7YOr4yBgFd6w8dJqwxegqZGJNBZl6U27HnGKuTZmDvVrD5quWOK/wAnMywiZzW+Qsk+l4X2c70+thp/A8Q== + +react-query@^3.34.12: + version "3.34.12" + resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.34.12.tgz#dcaaf7b629f0868aae8afef9fb7692f6ea7643bf" + integrity sha512-flDdudQVH4CqE+kNYYYyo4g2Yjek3H/36G3b9bK5oe26jD5gFnx+PPwnq0gayq5z2dcSfr2z4+drvuyeZ3A5QQ== + dependencies: + "@babel/runtime" "^7.5.5" + broadcast-channel "^3.4.1" + match-sorter "^6.0.2" + +react-redux@^7.2.0: + version "7.2.6" + resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.6.tgz#49633a24fe552b5f9caf58feb8a138936ddfe9aa" + integrity sha512-10RPdsz0UUrRL1NZE0ejTkucnclYSgXp5q+tB5SWx2qeG2ZJQJyymgAhwKy73yiL/13btfB6fPr+rgbMAaZIAQ== + dependencies: + "@babel/runtime" "^7.15.4" + "@types/react-redux" "^7.1.20" + hoist-non-react-statics "^3.3.2" + loose-envify "^1.4.0" + prop-types "^15.7.2" + react-is "^17.0.2" + +react-refresh@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== + +react-remove-scroll-bar@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/react-remove-scroll-bar/-/react-remove-scroll-bar-2.2.0.tgz#d4d545a7df024f75d67e151499a6ab5ac97c8cdd" + integrity 
sha512-UU9ZBP1wdMR8qoUs7owiVcpaPwsQxUDC2lypP6mmixaGlARZa7ZIBx1jcuObLdhMOvCsnZcvetOho0wzPa9PYg== + dependencies: + react-style-singleton "^2.1.0" + tslib "^1.0.0" + +react-remove-scroll@^2.4.1: + version "2.4.3" + resolved "https://registry.yarnpkg.com/react-remove-scroll/-/react-remove-scroll-2.4.3.tgz#83d19b02503b04bd8141ed6e0b9e6691a2e935a6" + integrity sha512-lGWYXfV6jykJwbFpsuPdexKKzp96f3RbvGapDSIdcyGvHb7/eqyn46C7/6h+rUzYar1j5mdU+XECITHXCKBk9Q== + dependencies: + react-remove-scroll-bar "^2.1.0" + react-style-singleton "^2.1.0" + tslib "^1.0.0" + use-callback-ref "^1.2.3" + use-sidecar "^1.0.1" + +react-router-dom@6: + version "6.2.1" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.2.1.tgz#32ec81829152fbb8a7b045bf593a22eadf019bec" + integrity sha512-I6Zax+/TH/cZMDpj3/4Fl2eaNdcvoxxHoH1tYOREsQ22OKDYofGebrNm6CTPUcvLvZm63NL/vzCYdjf9CUhqmA== + dependencies: + history "^5.2.0" + react-router "6.2.1" + +react-router@6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.2.1.tgz#be2a97a6006ce1d9123c28934e604faef51448a3" + integrity sha512-2fG0udBtxou9lXtK97eJeET2ki5//UWfQSl1rlJ7quwe6jrktK9FCCc8dQb5QY6jAv3jua8bBQRhhDOM/kVRsg== + dependencies: + history "^5.2.0" + +react-scripts@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.0.tgz#6547a6d7f8b64364ef95273767466cc577cb4b60" + integrity sha512-3i0L2CyIlROz7mxETEdfif6Sfhh9Lfpzi10CtcGs1emDQStmZfWjJbAIMtRD0opVUjQuFWqHZyRZ9PPzKCFxWg== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" + bfj "^7.0.2" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.0" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize "^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.0" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" + optionalDependencies: + fsevents "^2.3.2" + +react-style-singleton@^2.1.0, react-style-singleton@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/react-style-singleton/-/react-style-singleton-2.1.1.tgz#ce7f90b67618be2b6b94902a30aaea152ce52e66" + integrity sha512-jNRp07Jza6CBqdRKNgGhT3u9umWvils1xsuMOjZlghBDH2MU0PL2WZor4PGYjXpnRCa9DQSlHMs/xnABWOwYbA== + dependencies: + get-nonce "^1.0.0" + invariant "^2.2.4" + tslib "^1.0.0" + +react-virtualized-auto-sizer@^1.0.2: + version "1.0.6" + resolved "https://registry.yarnpkg.com/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.6.tgz#66c5b1c9278064c5ef1699ed40a29c11518f97ca" + integrity 
sha512-7tQ0BmZqfVF6YYEWcIGuoR3OdYe8I/ZFbNclFlGOC3pMqunkYF/oL30NCjSGl9sMEb17AnzixDz98Kqc3N76HQ== + +react-window@^1.8.5: + version "1.8.6" + resolved "https://registry.yarnpkg.com/react-window/-/react-window-1.8.6.tgz#d011950ac643a994118632665aad0c6382e2a112" + integrity sha512-8VwEEYyjz6DCnGBsd+MgkD0KJ2/OXFULyDtorIiTz+QzwoP94tBoA7CnbtyXMm+cCeAUER5KJcPtWl9cpKbOBg== + dependencies: + "@babel/runtime" "^7.0.0" + memoize-one ">=3.1.1 <6" + +react@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6, readable-stream@^3.4.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +recursive-readdir@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +redux@^4.0.0, redux@^4.0.4: + version "4.1.2" + resolved "https://registry.yarnpkg.com/redux/-/redux-4.1.2.tgz#140f35426d99bb4729af760afcf79eaaac407104" + integrity sha512-SH8PglcebESbd/shgf6mii6EIoRM0zrQyjcuQ+ojmfxjTtE0z9Y8pa62iA/OJ58qjP6j27uyW4kUF4jl/jd6sw== + dependencies: + "@babel/runtime" "^7.9.2" + +refractor@^3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/refractor/-/refractor-3.5.0.tgz#334586f352dda4beaf354099b48c2d18e0819aec" + integrity sha512-QwPJd3ferTZ4cSPPjdP5bsYHMytwWYnAN5EEnLtGvkqp/FCCnGsBgxrm9EuIDnjUC3Uc/kETtvVi7fSIVC74Dg== + dependencies: + hastscript "^6.0.0" + parse-entities "^2.0.0" + prismjs "~1.25.0" + +regenerate-unicode-properties@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz#54d09c7115e1f53dc2314a974b32c1c344efe326" + integrity sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA== + dependencies: + regenerate 
"^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.14.2: + version "0.14.5" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4" + integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-parser@^2.2.11: + version "2.2.11" + resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + +regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.3.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.1.tgz#b3f4c0059af9e47eca9f3f660e51d81307e72307" + integrity sha512-pMR7hBVUUGI7PMA37m2ofIdQCsomVnas+Jn5UPGAHQ+/LlwKm/aTLJHdasmHRzlfeZwHiAOaRSo2rbBDm3nNUQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^4.7.1: + version "4.8.0" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.8.0.tgz#e5605ba361b67b1718478501327502f4479a98f0" + integrity sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^9.0.0" + regjsgen "^0.5.2" + regjsparser "^0.7.0" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.2.tgz#92ff295fb1deecbf6ecdab2543d207e91aa33733" + integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== + +regjsparser@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.7.0.tgz#a6b667b54c885e18b52554cb4960ef71187e9968" + integrity sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ== + dependencies: + jsesc "~0.5.0" + +rehype-raw@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-5.1.0.tgz#66d5e8d7188ada2d31bc137bc19a1000cf2c6b7e" + integrity sha512-MDvHAb/5mUnif2R+0IPCYJU8WjHa9UzGtM/F4AVy5GixPlDZ1z3HacYy4xojDU+uBa+0X/3PIfyQI26/2ljJNA== + dependencies: + hast-util-raw "^6.1.0" + +rehype-react@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/rehype-react/-/rehype-react-6.2.1.tgz#9b9bf188451ad6f63796b784fe1f51165c67b73a" + integrity sha512-f9KIrjktvLvmbGc7si25HepocOg4z0MuNOtweigKzBcDjiGSTGhyz6VSgaV5K421Cq1O+z4/oxRJ5G9owo0KVg== + dependencies: + "@mapbox/hast-util-table-cell-style" 
"^0.2.0" + hast-to-hyperscript "^9.0.0" + +rehype-stringify@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/rehype-stringify/-/rehype-stringify-8.0.0.tgz#9b6afb599bcf3165f10f93fc8548f9a03d2ec2ba" + integrity sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g== + dependencies: + hast-util-to-html "^7.1.1" + +relateurl@^0.2.7: + version "0.2.7" + resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= + +remark-breaks@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/remark-breaks/-/remark-breaks-2.0.2.tgz#55fdec6c7da84f659aa7fdb1aa95b632870cee8d" + integrity sha512-LsQnPPQ7Fzp9RTjj4IwdEmjPOr9bxe9zYKWhs9ZQOg9hMg8rOfeeqQ410cvVdIK87Famqza1CKRxNkepp2EvUA== + dependencies: + unist-util-visit "^2.0.0" + +remark-emoji@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/remark-emoji/-/remark-emoji-2.2.0.tgz#1c702090a1525da5b80e15a8f963ef2c8236cac7" + integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w== + dependencies: + emoticon "^3.2.0" + node-emoji "^1.10.0" + unist-util-visit "^2.0.3" + +remark-parse@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-8.0.3.tgz#9c62aa3b35b79a486454c690472906075f40c7e1" + integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q== + dependencies: + ccount "^1.0.0" + collapse-white-space "^1.0.2" + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + is-whitespace-character "^1.0.0" + is-word-character "^1.0.0" + markdown-escapes "^1.0.0" + parse-entities "^2.0.0" + repeat-string "^1.5.4" + state-toggle "^1.0.0" + trim "0.0.1" + trim-trailing-lines "^1.0.0" + unherit "^1.0.4" + unist-util-remove-position "^2.0.0" + vfile-location "^3.0.0" + xtend "^4.0.1" + +remark-rehype@^8.0.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-8.1.0.tgz#610509a043484c1e697437fa5eb3fd992617c945" + integrity sha512-EbCu9kHgAxKmW1yEYjx3QafMyGY3q8noUbNUI5xyKbaFP89wbhDrKxyIQNukNYthzjNHZu6J7hwFg7hRm1svYA== + dependencies: + mdast-util-to-hast "^10.2.0" + +remove-accents@0.4.2: + version "0.4.2" + resolved "https://registry.yarnpkg.com/remove-accents/-/remove-accents-0.4.2.tgz#0a43d3aaae1e80db919e07ae254b285d9e1c7bb5" + integrity sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U= + +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +repeat-string@^1.5.4: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity 
sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" + source-map "0.6.1" + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.14.2, resolve@^1.17.0, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.21.0: + version "1.22.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + dependencies: + is-core-module "^2.8.1" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" + integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +restore-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: + version 
"3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +robust-predicates@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.1.tgz#ecde075044f7f30118682bd9fb3f123109577f9a" + integrity sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g== + +rollup-plugin-copy@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-copy/-/rollup-plugin-copy-3.4.0.tgz#f1228a3ffb66ffad8606e2f3fb7ff23141ed3286" + integrity sha512-rGUmYYsYsceRJRqLVlE9FivJMxJ7X6jDlP79fmFkL8sJs7VVMSVyA2yfyL+PGyO/vJs4A87hwhgVfz61njI+uQ== + dependencies: + "@types/fs-extra" "^8.0.1" + colorette "^1.1.0" + fs-extra "^8.1.0" + globby "10.0.1" + is-plain-object "^3.0.0" + +rollup-plugin-import-css@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rollup-plugin-import-css/-/rollup-plugin-import-css-3.0.2.tgz#7b718aba264b08cff1f171a83fbf0a1b36ad47ca" + integrity sha512-4Y/U5EMQHomMlYSF0OBOo/XJSgfou+iHMfBOqneaX5Cp5BCyQn1YrUtXC6KYEPHPxTadC+oXhrTCr9yzRN2DyA== + dependencies: + "@rollup/pluginutils" "^4.1.1" + +rollup-plugin-svg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-svg/-/rollup-plugin-svg-2.0.0.tgz#ce11b55e915d5b2190328c4e6632bd6b4fe12ee9" + integrity sha512-DmE7dSQHo1SC5L2uH2qul3Mjyd5oV6U1aVVkyvTLX/mUsRink7f1b1zaIm+32GEBA6EHu8H/JJi3DdWqM53ySQ== + dependencies: + rollup-pluginutils "^1.3.1" + +rollup-plugin-svgo@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-svgo/-/rollup-plugin-svgo-1.1.0.tgz#092faa52753aa0ede52f2405bc58286f945614ae" + integrity sha512-wcOKsBXBErjmCQJZmmnLlAjcVQAUApfzsp/k8fx7u/5vKm0sUFk+IiBT2ylGUX6jUap3PNASAhiew88TJrH6Qg== + dependencies: + svgo "1.3.0" + +rollup-plugin-terser@^7.0.0, rollup-plugin-terser@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== + dependencies: + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" + serialize-javascript "^4.0.0" + terser "^5.0.0" + +rollup-pluginutils@^1.3.1: + version "1.5.2" + resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-1.5.2.tgz#1e156e778f94b7255bfa1b3d0178be8f5c552408" + integrity sha1-HhVud4+UtyVb+hs9AXi+j1xVJAg= + dependencies: + estree-walker "^0.2.1" + minimatch "^3.0.2" + +rollup@^2.43.1: + version "2.66.1" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.66.1.tgz#366b0404de353c4331d538c3ad2963934fcb4937" + integrity sha512-crSgLhSkLMnKr4s9iZ/1qJCplgAgrRY+igWv8KhG/AjKOJ0YX/WpmANyn8oxrw+zenF3BXWDLa7Xl/QZISH+7w== + optionalDependencies: + fsevents "~2.3.2" + +rollup@^2.68.0: + version "2.68.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.68.0.tgz#6ccabfd649447f8f21d62bf41662e5caece3bd66" + integrity sha512-XrMKOYK7oQcTio4wyTz466mucnd8LzkiZLozZ4Rz0zQD+HeX4nUK4B8GrTX/2EvN2/vBF/i2WnaXboPxo0JylA== + optionalDependencies: + fsevents "~2.3.2" + +run-async@^2.4.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + integrity 
sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rw@1: + version "1.3.3" + resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" + integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= + +rxjs@^7.2.0: + version "7.5.4" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.4.tgz#3d6bd407e6b7ce9a123e76b1e770dc5761aa368d" + integrity sha512-h5M3Hk78r6wAheJF0a5YahB1yRQKCsZ4MsGdZ5O9ETbVtjPcScGfrMmoOq7EBsCRzd4BDkvDJ7ogP8Sz5tTFiQ== + dependencies: + tslib "^2.1.0" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize.css@*: + version "13.0.0" + resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== + +sass-loader@^12.3.0: + version "12.4.0" + resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.4.0.tgz#260b0d51a8a373bb8e88efc11f6ba5583fea0bcf" + integrity sha512-7xN+8khDIzym1oL9XyS6zP6Ges+Bo2B2xbPrjdMHEYyV3AQYhd/wXeru++3ODHF0zMjYmVadblSKrPrjEkL8mg== + dependencies: + klona "^2.0.4" + neo-async "^2.6.2" + +sax@~1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91" + integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== 
+ dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= + +selfsigned@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.0.tgz#e927cd5377cbb0a1075302cff8df1042cc2bce5b" + integrity sha512-cUdFiCbKoa1mZ6osuJs2uDHrs0k0oprsKveFiiaBKCNq3SYyb5gs2HxhQyDNLCmL51ZZThqi4YNDpCK6GOP1iQ== + dependencies: + node-forge "^1.2.0" + +semver@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" + integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.2, semver@^7.3.5: + version "7.3.5" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" + integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== + dependencies: + lru-cache "^6.0.0" + +send@0.17.2: + version "0.17.2" + resolved "https://registry.yarnpkg.com/send/-/send-0.17.2.tgz#926622f76601c41808012c8bf1688fe3906f7820" + integrity sha512-UJYB6wFSJE3G00nEivR5rgWp8c2xXvJ3OPWPhmuteU0IKj8nKbG3DrjiOmLwpnHGYWAVwA69zmTm++YG0Hmwww== + dependencies: + debug "2.6.9" + depd "~1.1.2" + destroy "~1.0.4" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "1.8.1" + mime "1.6.0" + ms "2.1.3" + on-finished "~2.3.0" + range-parser "~1.2.1" + statuses "~1.5.0" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" + +serialize-javascript@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serialize-query-params@^1.3.5: + version "1.3.6" + resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" + integrity sha512-VlH7sfWNyPVZClPkRacopn6sn5uQMXBsjPVz1+pBHX895VpcYVznfJtZ49e6jymcrz+l/vowkepCZn/7xEAEdw== + +serve-index@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.14.2: + version "1.14.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.2.tgz#722d6294b1d62626d41b43a013ece4598d292bfa" + integrity sha512-+TMNA9AFxUEGuC0z2mevogSnn9MXKb4fa7ngeRMJaaGv8vTwnIEkKi+QGvPt33HSnf8pRS+WGM0EbMtCJLKMBQ== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.17.2" + +set-cookie-parser@^2.4.6: + version "2.4.8" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.4.8.tgz#d0da0ed388bc8f24e706a391f9c9e252a13c58b2" + integrity sha512-edRH8mBKEWNVIVMKejNnuJxleqYE/ZSdcT8/Nem9/mmosx12pctd80s2Oy00KNZzrogMZS5mauK2/ymL1bvlvg== + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.6" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.6.tgz#24e630c4b0f03fea446a2bd299e62b4a6ca8d0af" + 
integrity sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.21: + version "0.3.24" + resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-list-map@^2.0.0, source-list-map@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" + integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + source-map-js "^1.0.1" + +source-map-resolve@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" + integrity sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" + integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.5.0, source-map@^0.5.3: + 
version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.7.3, source-map@~0.7.2: + version "0.7.3" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" + integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + +source-map@^0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +sourcemap-codec@^1.4.4: + version "1.4.8" + resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +space-separated-tokens@^1.0.0: + version "1.1.5" + resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" + integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +split-on-first@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f" + integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +stable@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stackframe@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.0.tgz#52429492d63c62eb989804c11552e3d22e779303" + integrity sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA== + +state-toggle@^1.0.0: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe" + integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== + +"statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + +statuses@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +strict-event-emitter@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/strict-event-emitter/-/strict-event-emitter-0.2.0.tgz#78e2f75dc6ea502e5d8a877661065a1e2deedecd" + integrity sha512-zv7K2egoKwkQkZGEaH8m+i2D0XiKzx5jNsiSul6ja2IYFvil10A59Z9Y7PPAAe5OW53dQUf9CfsHKzjZzKkm1w== + dependencies: + events "^3.3.0" + +strict-uri-encode@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" + integrity sha1-ucczDHBChi9rFC3CdLvMWGbONUY= + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-length@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + +string-natural-compare@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.6.tgz#5abb5dabc94c7b0ea2380f65ba610b3a544b15fa" + integrity sha512-6WgDX8HmQqvEd7J+G6VtAahhsQIssiZ8zl7zKh1VDMFyL3hRTJP4FTNA3RbIp2TOQ9AYNDcc7e3fH0Qbup+DBg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.2" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.3.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" + integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + dependencies: + call-bind "^1.0.2" + define-properties 
"^1.1.3" + +string.prototype.trimstart@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" + integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-entities@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-3.1.0.tgz#b8d3feac256d9ffcc9fa1fefdcf3ca70576ee903" + integrity sha512-3FP+jGMmMV/ffZs86MoghGqAoqXAdxLrJP4GUdrDN1aIScYih5tuIO3eF4To5AJZ79KDZ8Fpdy7QJnK8SsL1Vg== + dependencies: + character-entities-html4 "^1.0.0" + character-entities-legacy "^1.0.0" + xtend "^4.0.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.0, strip-ansi@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-comments@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" + integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== + +style-to-object@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.3.0.tgz#b1b790d205991cc783801967214979ee19a76e46" + integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== + dependencies: + inline-style-parser "0.1.1" + +stylehacks@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.0.2.tgz#fa10e5181c6e8dc0bddb4a3fb372e9ac42bba2ad" + integrity sha512-114zeJdOpTrbQYRD4OU5UWJ99LKUaqCPJTU1HQ/n3q3BwmllFN8kHENaLnOeqVq6AhXrWfxHNZTl33iJ4oy3cQ== + dependencies: + browserslist "^4.16.6" + postcss-selector-parser "^6.0.4" + +stylis@4.0.13: + version "4.0.13" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.13.tgz#f5db332e376d13cc84ecfe5dace9a2a51d954c91" + integrity sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2: + version "2.0.4" + resolved 
"https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.0.tgz#bae51ba95ded9a33a36b7c46ce9c359ae9154313" + integrity sha512-MLfUA6O+qauLDbym+mMZgtXCGRfIxyQoeH6IKVcFslyODEe/ElJNwr0FohQ3xG4C6HK6bk3KYPPXwHVJk3V5NQ== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.33" + csso "^3.5.1" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^1.2.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^2.7.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tabbable@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-3.1.2.tgz#f2d16cccd01f400e38635c7181adfe0ad965a4a2" + integrity sha512-wjB6puVXTYO0BSFtCmWQubA/KIn7Xvajw0x0l6eJUudMG/EAiJvIUnyNX6xO4NpGrJ16lbD0eUseB9WxW0vlpQ== + +tailwindcss@^3.0.2: + version "3.0.18" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.0.18.tgz#ea4825e6496d77dc21877b6b61c7cc56cda3add5" + integrity sha512-ihPTpEyA5ANgZbwKlgrbfnzOp9R5vDHFWmqxB1PT8NwOGCOFVVMl+Ps1cQQ369acaqqf1BEF77roCwK0lvNmTw== + dependencies: + arg "^5.0.1" + chalk "^4.1.2" + chokidar "^3.5.3" + color-name "^1.1.4" + cosmiconfig "^7.0.1" + detective "^5.2.0" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.11" + glob-parent "^6.0.2" + is-glob "^4.0.3" + normalize-path "^3.0.0" + object-hash "^2.2.0" + postcss-js "^4.0.0" + postcss-load-config "^3.1.0" + postcss-nested "5.0.6" + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.21.0" + +tapable@^1.0.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +temp-dir@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== + +tempy@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== + dependencies: + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: + version "5.3.0" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz#21641326486ecf91d8054161c816e464435bae9f" + integrity sha512-LPIisi3Ol4chwAaPP8toUJ3L4qCM1G0wao7L3qNv57Drezxj6+VEyySpPw4B1HSO2Eg/hDY/MNF5XihCAoqnsQ== + dependencies: + jest-worker "^27.4.1" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + terser "^5.7.2" + +terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: + version "5.10.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.10.0.tgz#b86390809c0389105eb0a0b62397563096ddafcc" + integrity sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA== + dependencies: + commander "^2.20.0" + source-map "~0.7.2" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-diff@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/text-diff/-/text-diff-1.0.1.tgz#6c105905435e337857375c9d2f6ca63e453ff565" + integrity sha1-bBBZBUNeM3hXN1ydL2ymPkU/9WU= + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +throat@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +through@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +thunky@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +timsort@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" + integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= + +tiny-invariant@^1.0.6: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" + integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== + +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" + integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.1.2" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= + dependencies: + punycode "^2.1.0" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + +trim-trailing-lines@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0" + integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== + +trim@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/trim/-/trim-0.0.1.tgz#5858547f6b290757ee95cccc666fb50084c460dd" + integrity sha1-WFhUf2spB1fulczMZm+1AITEYN0= + +trough@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" + integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== + +tryer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity 
sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + +tsconfig-paths@^3.12.0: + version "3.12.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.12.0.tgz#19769aca6ee8f6a1a341e38c8fa45dd9fb18899b" + integrity sha512-e5adrnOYT6zqVnWqZu7i/BQ3BnhzvGbjEjejFXO20lKIKpwTaupkCPgEfv4GZK1IBciJUEhYs3J3p75FdaTFVg== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.0" + strip-bom "^3.0.0" + +tslib@^1.0.0, tslib@^1.8.1, tslib@^1.9.3: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" + integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-fest@^1.2.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1" + integrity sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + 
+typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +typescript@^4.4.2: + version "4.5.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3" + integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== + +unbox-primitive@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" + integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== + dependencies: + function-bind "^1.1.1" + has-bigints "^1.0.1" + has-symbols "^1.0.2" + which-boxed-primitive "^1.0.2" + +unherit@^1.0.4: + version "1.1.3" + resolved "https://registry.yarnpkg.com/unherit/-/unherit-1.1.3.tgz#6c9b503f2b41b262330c80e91c8614abdaa69c22" + integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== + dependencies: + inherits "^2.0.0" + xtend "^4.0.0" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" + integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8" + integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== + +unified@^9.2.0: + version "9.2.2" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +unist-builder@^2.0.0: + 
version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-builder/-/unist-builder-2.0.3.tgz#77648711b5d86af0942f334397a33c5e91516436" + integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== + +unist-util-generated@^1.0.0: + version "1.1.6" + resolved "https://registry.yarnpkg.com/unist-util-generated/-/unist-util-generated-1.1.6.tgz#5ab51f689e2992a472beb1b35f2ce7ff2f324d4b" + integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg== + +unist-util-is@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-3.0.0.tgz#d9e84381c2468e82629e4a5be9d7d05a2dd324cd" + integrity sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A== + +unist-util-is@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-4.1.0.tgz#976e5f462a7a5de73d94b706bac1b90671b57797" + integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== + +unist-util-position@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-3.1.0.tgz#1c42ee6301f8d52f47d14f62bbdb796571fa2d47" + integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA== + +unist-util-remove-position@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz#5d19ca79fdba712301999b2b73553ca8f3b352cc" + integrity sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA== + dependencies: + unist-util-visit "^2.0.0" + +unist-util-stringify-position@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da" + integrity sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g== + dependencies: + "@types/unist" "^2.0.2" + +unist-util-stringify-position@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz#d517d2883d74d0daa0b565adc3d10a02b4a8cde9" + integrity sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA== + dependencies: + "@types/unist" "^2.0.0" + +unist-util-visit-parents@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz#25e43e55312166f3348cae6743588781d112c1e9" + integrity sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g== + dependencies: + unist-util-is "^3.0.0" + +unist-util-visit-parents@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz#65a6ce698f78a6b0f56aa0e88f13801886cdaef6" + integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + +unist-util-visit@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-1.4.1.tgz#4724aaa8486e6ee6e26d7ff3c8685960d560b1e3" + integrity sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw== + dependencies: + unist-util-visit-parents "^2.0.0" + +unist-util-visit@^2.0.0, 
unist-util-visit@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-2.0.3.tgz#c3703893146df47203bb8a9795af47d7b971208c" + integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + unist-util-visit-parents "^3.0.0" + +universalify@^0.1.0, universalify@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unload@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/unload/-/unload-2.2.0.tgz#ccc88fdcad345faa06a92039ec0f80b488880ef7" + integrity sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA== + dependencies: + "@babel/runtime" "^7.6.2" + detect-node "^2.0.4" + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + +unquote@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= + +upath@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.4" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.4.tgz#e4f645a7e2a0852cc8a66b14b292a3e9a11a97fd" + integrity sha512-ITeAByWWoqutFClc/lRZnFplgXgEZr3WJ6XngMM/N9DMIm4K8zXPCZ1Jdu0rERwO84w1WC5wkle2ubwTA4NTBg== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +use-callback-ref@^1.2.3, use-callback-ref@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.2.5.tgz#6115ed242cfbaed5915499c0a9842ca2912f38a5" + integrity sha512-gN3vgMISAgacF7sqsLPByqoePooY3n2emTH59Ur5d/M8eg4WTWu1xp8i8DHjohftIyEx0S08RiYxbffr4j8Peg== + +use-memo-one@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/use-memo-one/-/use-memo-one-1.1.2.tgz#0c8203a329f76e040047a35a1197defe342fab20" + integrity sha512-u2qFKtxLsia/r8qG0ZKkbytbztzRb317XCkT7yP8wxL0tZ/CzK2G+WWie5vWvpyeP7+YoPIwbJoIHJ4Ba4k0oQ== + +use-query-params@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/use-query-params/-/use-query-params-1.2.3.tgz#306c31a0cbc714e8a3b4bd7e91a6a9aaccaa5e22" + integrity sha512-cdG0tgbzK+FzsV6DAt2CN8Saa3WpRnze7uC4Rdh7l15epSFq7egmcB/zuREvPNwO5Yk80nUpDZpiyHsoq50d8w== + dependencies: + serialize-query-params "^1.3.5" + +use-sidecar@^1.0.1, use-sidecar@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/use-sidecar/-/use-sidecar-1.0.5.tgz#ffff2a17c1df42e348624b699ba6e5c220527f2b" + integrity sha512-k9jnrjYNwN6xYLj1iaGhonDghfvmeTmYjAiGvOr7clwKfPjMXJf4/HOr7oT5tJwYafgp2tG2l3eZEOfoELiMcA== + dependencies: + detect-node-es "^1.1.0" + tslib "^1.9.3" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util.promisify@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + +uuid@^8.3.0, uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + +vfile-location@^3.0.0, vfile-location@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-3.2.0.tgz#d8e41fbcbd406063669ebf6c33d56ae8721d0f3c" + integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA== + +vfile-message@*: + version "3.1.0" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-3.1.0.tgz#5437035aa43185ff4b9210d32fada6c640e59143" + integrity sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^3.0.0" + +vfile-message@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a" + integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^2.0.0" + +vfile@^4.0.0, vfile@^4.2.0: + version "4.2.1" + resolved 
"https://registry.yarnpkg.com/vfile/-/vfile-4.2.1.tgz#03f1dce28fc625c625bc6514350fbdb00fa9e624" + integrity sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA== + dependencies: + "@types/unist" "^2.0.0" + is-buffer "^2.0.0" + unist-util-stringify-position "^2.0.0" + vfile-message "^2.0.0" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +watchpack@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.3.1.tgz#4200d9447b401156eeca7767ee610f8809bc9d25" + integrity sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + dependencies: + defaults "^1.0.3" + +web-namespaces@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" + integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-dev-middleware@^5.3.0: + version 
"5.3.0" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.0.tgz#8fc02dba6e72e1d373eca361623d84610f27be7c" + integrity sha512-MouJz+rXAm9B1OTOYaJnn6rtD/lWZPy2ufQCH3BPs8Rloh/Du6Jze4p7AeLYHkVi0giJnYLaSGDC7S+GM9arhg== + dependencies: + colorette "^2.0.10" + memfs "^3.2.2" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.7.3" + resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.7.3.tgz#4e995b141ff51fa499906eebc7906f6925d0beaa" + integrity sha512-mlxq2AsIw2ag016nixkzUkdyOE8ST2GTy34uKSABp1c4nhjZvH90D5ZRR+UOLSsG4Z3TFahAi72a3ymRtfRm+Q== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/serve-index" "^1.9.1" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.2.2" + ansi-html-community "^0.0.8" + bonjour "^3.5.0" + chokidar "^3.5.2" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^1.6.0" + default-gateway "^6.0.3" + del "^6.0.0" + express "^4.17.1" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.0" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + portfinder "^1.0.28" + schema-utils "^4.0.0" + selfsigned "^2.0.0" + serve-index "^1.9.1" + sockjs "^0.3.21" + spdy "^4.0.2" + strip-ansi "^7.0.0" + webpack-dev-middleware "^5.3.0" + ws "^8.1.0" + +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: + tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-sources@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.67.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.67.0.tgz#cb43ca2aad5f7cc81c4cd36b626e6b819805dbfd" + integrity sha512-LjFbfMh89xBDpUMgA1W9Ur6Rn/gnr2Cq1jjHFPo4v6a79/ypznSYbAyPgGhwsxBtMIaEmDD1oJoA7BEYw/Fbrw== + dependencies: + "@types/eslint-scope" "^3.7.0" + "@types/estree" "^0.0.50" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.4.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.8.3" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-better-errors "^1.0.2" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils 
"^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.3.1" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-fetch@^3.6.2: + version "3.6.2" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workbox-background-sync@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.4.2.tgz#bb31b95928d376abcb9bde0de3a0cef9bae46cf7" + integrity sha512-P7c8uG5X2k+DMICH9xeSA9eUlCOjHHYoB42Rq+RtUpuwBxUOflAXR1zdsMWj81LopE4gjKXlTw7BFd1BDAHo7g== + dependencies: + idb "^6.1.4" + workbox-core "6.4.2" + +workbox-broadcast-update@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.4.2.tgz#5094c4767dfb590532ac03ee07e9e82b2ac206bc" + integrity sha512-qnBwQyE0+PWFFc/n4ISXINE49m44gbEreJUYt2ldGH3+CNrLmJ1egJOOyUqqu9R4Eb7QrXcmB34ClXG7S37LbA== + dependencies: + workbox-core "6.4.2" + +workbox-build@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.4.2.tgz#47f9baa946c3491533cd5ccb1f194a7160e8a6e3" + integrity sha512-WMdYLhDIsuzViOTXDH+tJ1GijkFp5khSYolnxR/11zmfhNDtuo7jof72xPGFy+KRpsz6tug39RhivCj77qqO0w== + dependencies: + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" + common-tags "^1.8.0" + fast-json-stable-stringify "^2.1.0" + fs-extra "^9.0.1" + glob "^7.1.6" + lodash "^4.17.20" + pretty-bytes "^5.3.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" + source-map-url "^0.4.0" + stringify-object "^3.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" + upath "^1.2.0" + workbox-background-sync "6.4.2" + workbox-broadcast-update "6.4.2" + workbox-cacheable-response "6.4.2" + workbox-core "6.4.2" + workbox-expiration "6.4.2" + workbox-google-analytics "6.4.2" + workbox-navigation-preload "6.4.2" + workbox-precaching "6.4.2" + workbox-range-requests "6.4.2" + workbox-recipes "6.4.2" + workbox-routing "6.4.2" + workbox-strategies "6.4.2" + workbox-streams "6.4.2" + workbox-sw "6.4.2" + workbox-window "6.4.2" + +workbox-cacheable-response@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.4.2.tgz#ebcabb3667019da232e986a9927af97871e37ccb" + integrity sha512-9FE1W/cKffk1AJzImxgEN0ceWpyz1tqNjZVtA3/LAvYL3AC5SbIkhc7ZCO82WmO9IjTfu8Vut2X/C7ViMSF7TA== + dependencies: + workbox-core "6.4.2" + +workbox-core@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.4.2.tgz#f99fd36a211cc01dce90aa7d5f2c255e8fe9d6bc" + integrity sha512-1U6cdEYPcajRXiboSlpJx6U7TvhIKbxRRerfepAJu2hniKwJ3DHILjpU/zx3yvzSBCWcNJDoFalf7Vgd7ey/rw== + +workbox-expiration@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.4.2.tgz#61613459fd6ddd1362730767618d444c6b9c9139" + integrity sha512-0hbpBj0tDnW+DZOUmwZqntB/8xrXOgO34i7s00Si/VlFJvvpRKg1leXdHHU8ykoSBd6+F2KDcMP3swoCi5guLw== + dependencies: + idb "^6.1.4" + workbox-core "6.4.2" + +workbox-google-analytics@6.4.2: + version "6.4.2" + resolved 
"https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.4.2.tgz#eea7d511b3078665a726dc2ee9f11c6b7a897530" + integrity sha512-u+gxs3jXovPb1oul4CTBOb+T9fS1oZG+ZE6AzS7l40vnyfJV79DaLBvlpEZfXGv3CjMdV1sT/ltdOrKzo7HcGw== + dependencies: + workbox-background-sync "6.4.2" + workbox-core "6.4.2" + workbox-routing "6.4.2" + workbox-strategies "6.4.2" + +workbox-navigation-preload@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.4.2.tgz#35cd4ba416a530796af135410ca07db5bee11668" + integrity sha512-viyejlCtlKsbJCBHwhSBbWc57MwPXvUrc8P7d+87AxBGPU+JuWkT6nvBANgVgFz6FUhCvRC8aYt+B1helo166g== + dependencies: + workbox-core "6.4.2" + +workbox-precaching@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.4.2.tgz#8d87c05d54f32ac140f549faebf3b4d42d63621e" + integrity sha512-CZ6uwFN/2wb4noHVlALL7UqPFbLfez/9S2GAzGAb0Sk876ul9ukRKPJJ6gtsxfE2HSTwqwuyNVa6xWyeyJ1XSA== + dependencies: + workbox-core "6.4.2" + workbox-routing "6.4.2" + workbox-strategies "6.4.2" + +workbox-range-requests@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.4.2.tgz#050f0dfbb61cd1231e609ed91298b6c2442ae41b" + integrity sha512-SowF3z69hr3Po/w7+xarWfzxJX/3Fo0uSG72Zg4g5FWWnHpq2zPvgbWerBZIa81zpJVUdYpMa3akJJsv+LaO1Q== + dependencies: + workbox-core "6.4.2" + +workbox-recipes@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.4.2.tgz#68de41fa3a77b444b0f93c9c01a76ba1d41fd2bf" + integrity sha512-/oVxlZFpAjFVbY+3PoGEXe8qyvtmqMrTdWhbOfbwokNFtUZ/JCtanDKgwDv9x3AebqGAoJRvQNSru0F4nG+gWA== + dependencies: + workbox-cacheable-response "6.4.2" + workbox-core "6.4.2" + workbox-expiration "6.4.2" + workbox-precaching "6.4.2" + workbox-routing "6.4.2" + workbox-strategies "6.4.2" + +workbox-routing@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.4.2.tgz#65b1c61e8ca79bb9152f93263c26b1f248d09dcc" + integrity sha512-0ss/n9PAcHjTy4Ad7l2puuod4WtsnRYu9BrmHcu6Dk4PgWeJo1t5VnGufPxNtcuyPGQ3OdnMdlmhMJ57sSrrSw== + dependencies: + workbox-core "6.4.2" + +workbox-strategies@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.4.2.tgz#50c02bf2d116918e1a8052df5f2c1e4103c62d5d" + integrity sha512-YXh9E9dZGEO1EiPC3jPe2CbztO5WT8Ruj8wiYZM56XqEJp5YlGTtqRjghV+JovWOqkWdR+amJpV31KPWQUvn1Q== + dependencies: + workbox-core "6.4.2" + +workbox-streams@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.4.2.tgz#3bc615cccebfd62dedf28315afb7d9ee177912a5" + integrity sha512-ROEGlZHGVEgpa5bOZefiJEVsi5PsFjJG9Xd+wnDbApsCO9xq9rYFopF+IRq9tChyYzhBnyk2hJxbQVWphz3sog== + dependencies: + workbox-core "6.4.2" + workbox-routing "6.4.2" + +workbox-sw@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.4.2.tgz#9a6db5f74580915dc2f0dbd47d2ffe057c94a795" + integrity sha512-A2qdu9TLktfIM5NE/8+yYwfWu+JgDaCkbo5ikrky2c7r9v2X6DcJ+zSLphNHHLwM/0eVk5XVf1mC5HGhYpMhhg== + +workbox-webpack-plugin@^6.4.1: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.4.2.tgz#aad9f11b028786d5b781420e68f4e8f570ea9936" + integrity sha512-CiEwM6kaJRkx1cP5xHksn13abTzUqMHiMMlp5Eh/v4wRcedgDTyv6Uo8+Hg9MurRbHDosO5suaPyF9uwVr4/CQ== + dependencies: + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + 
source-map-url "^0.4.0" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.4.2" + +workbox-window@6.4.2: + version "6.4.2" + resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.4.2.tgz#5319a3e343fa1e4bd15a1f53a07b58999d064c8a" + integrity sha512-KVyRKmrJg7iB+uym/B/CnEUEFG9CvnTU1Bq5xpXHbtgD9l+ShDekSl1wYpqw/O0JfeeQVOFb8CiNfvnwWwqnWQ== + dependencies: + "@types/trusted-types" "^2.0.2" + workbox-core "6.4.2" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.6" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.6.tgz#e59fc509fb15ddfb65487ee9765c5a51dec5fe7b" + integrity sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA== + +ws@^8.1.0: + version "8.4.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.4.2.tgz#18e749868d8439f2268368829042894b6907aa0b" + integrity sha512-Kbk4Nxyq7/ZWqr/tarI9yIt/+iNNFOjBXEWgTb4ydaNHBNGgvf2QHbS9fdfsndfjFlFwEd4Al+mw83YkaD10ZA== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs-parser@^21.0.0: + version "21.0.0" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.0.tgz#a485d3966be4317426dd56bdb6a30131b281dc55" + integrity sha512-z9kApYUOCwoeZ78rfRYYWdiU/iNL6mwwYlkkZfJoyMR1xps+NEBX5X7XmRpxkZHhXJ6+Ey00IwKxBBSW9FIjyA== + +yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yargs@^17.3.0: + version "17.3.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.3.1.tgz#da56b28f32e2fd45aefb402ed9c26f42be4c07b9" + integrity sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.0.0" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +zod@^3.11.6: + version "3.11.6" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.11.6.tgz#e43a5e0c213ae2e02aefe7cb2b1a6fa3d7f1f483" + integrity sha512-daZ80A81I3/9lIydI44motWe6n59kRBfNzTuS2bfzVh1nAXi667TOTWWtatxyG+fwgNUiagSj/CWZwRRbevJIg== + +zwitch@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-1.0.5.tgz#d11d7381ffed16b742f6af7b3f223d5cd9fe9920" + integrity sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==